OrbitDB can't append data

I want to use orbit-db to create a database with Node.js, but every time I create a database and then add new data, it deletes the previous element and I can only read the new one. I tried different store types (docs, keyvalue, log, feed) but I hit the same problem every time.
Here is my code to create a database:
const IPFS = require('ipfs')
const OrbitDB = require('orbit-db')

async function createDb() {
  // Create IPFS instance
  const ipfsOptions = { repo: './ipfs' }
  const ipfs = await IPFS.create(ipfsOptions)

  // Create OrbitDB instance
  const orbitdb = await OrbitDB.createInstance(ipfs)

  // Create database instance
  const db = await orbitdb.docs('IOFY')
  console.log("address", db.address.toString())

  const identity = db.identity
  console.log("identity ", identity.toJSON())

  console.log("put data")
  await db.put({ _id: 'ip2', name: 'shamb0t', followers: 600 })

  console.log("get data")
  const profile = db.get('')
  console.log("profile = ", profile)

  console.log("close")
  await db.close()

  console.log("disconnect")
  await orbitdb.disconnect()
}
It creates the database and displays its address. Here is the read function; the database address must be passed as an argument:
async function readDb(fullAddress) {
  console.log("read function")

  // Create IPFS instance
  const ipfsOptions = { repo: './ipfs' }
  const ipfs = await IPFS.create(ipfsOptions)

  // Create OrbitDB instance
  const orbitdb = await OrbitDB.createInstance(ipfs)

  // Open the existing database by address
  const db = await orbitdb.open(fullAddress)
  console.log("address", db.address.toString())
  await db.load()

  console.log("get data")
  const value = await db.get('')
  console.log("value = ", value)

  console.log("close")
  await db.close()

  console.log("disconnect")
  await orbitdb.disconnect()
}
It displays only the last element I added to the database; it's as if each new element overwrites the previous ones. Why does this happen?
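One thing worth checking for the log and feed store attempts (an assumption about a possible cause, not a confirmed diagnosis): orbit-db's iterator() defaults to limit: 1, so it returns only the most recent entry unless you ask for more. A minimal sketch:

// Sketch: reading every entry from a feed store ('IOFY-feed' is an
// illustrative name). iterator() defaults to { limit: 1 }, which returns
// only the latest entry; pass { limit: -1 } to collect them all.
const feed = await orbitdb.feed('IOFY-feed')
await feed.add({ name: 'shamb0t' })
await feed.add({ name: 'shamb0t2' })
const all = feed.iterator({ limit: -1 })
  .collect()
  .map((e) => e.payload.value)
console.log(all) // both entries, not just the last one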

Related

Update existing records into Odoo using Odoo-await api in NodeJS

I'm trying to update an Odoo 15 instance with a collection retrieved from Google Firestore using an API. I successfully retrieve the collection from Firestore, but where I'm stuck is mapping this information onto the Odoo users and updating their records (I have identified the fields that are stored in the collection and their respective Odoo fields).
This is the API
const yargs = require('yargs');
const { hideBin } = require('yargs/helpers');
const Firestore = require('@google-cloud/firestore');
const Odoo = require('odoo-await');

// Initialize app
const firebase = require('firebase-admin');
const credentials = require('./pulse-hris-update-dev-607161dcc967.json');
firebase.initializeApp({
  credential: firebase.credential.cert(credentials)
});

// GLOBALS
const version = '0.1';

// HELPERS

// __jlog__
// Easier to type than console.log(); prepends a timestamp
function jlog(m) {
  console.log(`${(new Date()).toISOString()}: ` + m);
}

// __fetchProfiles()__
// returns: Array containing profile objects from Firestore.
async function fetchProfiles() {
  // set up Firestore connector
  const start = new Date();
  jlog('Fetching profiles from Firestore...');
  const db = firebase.firestore();
  // get all profiles
  const profiles = [];
  const qsnap = db.collection('profiles');
  const docs = await qsnap.get();
  if (docs.empty) throw new Error('Nothing retrieved from Firestore.');
  docs.forEach(doc => {
    profiles.push(doc.data());
  });
  const stop = new Date();
  jlog(`Fetched ${profiles.length} profiles in ${stop - start}ms.`);
  return profiles;
}

// __updatePulse()__
// profiles: array of objects returned from fetchProfiles()
async function updatePulse(profiles) {
  jlog('Updating Pulse...');
  if (!profiles || profiles.length === 0) throw new Error('Nothing was retrieved from Firestore.');
  const odoo = new Odoo({
    host: "HOST",
    port: PORT,
    database: "DATABASE URL",
    username: "USER",
    password: "PASS"
  });
  await odoo.connect();
  for (const profile of profiles) {
    // search function to retrieve mail in Odoo server
    // if the profile is new, use a create method to map and create the data
    // parse function and map fields in Odoo
    const updated = await odoo.update('hr.employee', profile.id, json_parse);
  }
  await odoo.disconnect();
  jlog('... done.');
  return true;
}
// __main__
const start = new Date();
const args = yargs(hideBin(process.argv)).argv;
fetchProfiles()
  .then(profiles => {
    console.log(profiles);
    return updatePulse(profiles);
  })
  .then(() => {
    // tidy up
    const stop = new Date();
    jlog(`Finished execution at: ${stop.toISOString()}.\nTotal elapsed time is ${stop - start}ms.`);
  });
// END
I have an idea of how the process should go, but I feel like I need some pointers on how to start building the updatePulse() function that is going to handle the Odoo update.
Currently I think the order should be:
1. Fetch the IDs of the Odoo users using their email.
2. Map the fields that come from the 'profile' array to the Odoo fields.
3. Update each module whose fields I want updated.
Any ideas or comments are appreciated (a sketch of this flow follows below).
Regards
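A minimal sketch of those three steps using odoo-await's search / update / create calls. Matching employees on work_email, the profile field names, and the field mapping are illustrative assumptions, not the real schema:

// Sketch only: assumes each Firestore profile has an `email` field and that
// Odoo employees are matched on `work_email`; mapProfileToOdoo() is a
// hypothetical helper to fill in with the identified field pairs.
function mapProfileToOdoo(profile) {
  return {
    name: profile.fullName,      // illustrative mapping;
    mobile_phone: profile.phone, // replace with the real pairs
  };
}

async function updatePulse(profiles) {
  const odoo = new Odoo({ host: 'HOST', port: PORT, database: 'DB', username: 'USER', password: 'PASS' });
  await odoo.connect();
  for (const profile of profiles) {
    // 1. fetch the Odoo employee ID by email
    const ids = await odoo.search('hr.employee', { work_email: profile.email });
    // 2. map the profile fields onto Odoo fields
    const fields = mapProfileToOdoo(profile);
    // 3. update the existing record, or create it if the profile is new
    if (ids.length > 0) {
      await odoo.update('hr.employee', ids[0], fields);
    } else {
      await odoo.create('hr.employee', fields);
    }
  }
  return true;
}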

How to save data in AWS Neptune DB using Node.js?

Is there a way to save data in an Amazon AWS Neptune DB using Node.js?
I am running this code in a Lambda.
I made the connection to the Neptune DB using the code below.
const gremlin = require('gremlin');
const DriverRemoteConnection = gremlin.driver.DriverRemoteConnection;
const Graph = gremlin.structure.Graph;

const dc = new DriverRemoteConnection('endpoint', {});
const graph = new Graph();
const g = graph.traversal().withRemote(dc);
Here's a JavaScript Lambda function that writes data to Neptune (and wraps the write in a retry block in case of concurrent modifications). The function gets the Neptune endpoint and port from environment variables. The write query is in the query() method. It's a simple upsert example that tries to create a vertex using a randomly generated ID. If a vertex with that ID already exists, the query returns that vertex rather than creating a new one.
This example creates a single connection that persists for the lifetime of the Lambda container (rather than per invocation). There's some error checking in the retry code that recreates the connection in the case of an untoward network issue.
const gremlin = require('gremlin');
const async = require('async');
const traversal = gremlin.process.AnonymousTraversalSource.traversal;
const DriverRemoteConnection = gremlin.driver.DriverRemoteConnection;

let conn = createRemoteConnection();
let g = createGraphTraversalSource(conn);

const t = gremlin.process.t;
const __ = gremlin.process.statics;

// Upsert: return the vertex with this ID if it exists, otherwise create it.
async function query(id) {
  return g.V(id)
    .fold()
    .coalesce(
      __.unfold(),
      __.addV('User').property(t.id, id)
    )
    .id().next();
}

exports.handler = async (event, context) => {
  const id = Math.floor(Math.random() * 10000).toString();
  return async.retry(
    {
      times: 5,
      interval: 1000,
      errorFilter: function (err) {
        // Add filters here to determine whether error can be retried
        console.warn('Determining whether retriable error: ' + err.message);
        // Check for connection issues
        if (err.message.startsWith('WebSocket is not open')) {
          console.warn('Reopening connection');
          conn.close();
          conn = createRemoteConnection();
          g = createGraphTraversalSource(conn);
          return true;
        }
        // Check for ConcurrentModificationException
        if (err.message.includes('ConcurrentModificationException')) {
          console.warn('Retrying query because of ConcurrentModificationException');
          return true;
        }
        return false;
      }
    },
    async function () {
      const result = await query(id);
      return result['value'];
    });
};

function createRemoteConnection() {
  return new DriverRemoteConnection(
    connectionString(),
    {
      mimeType: 'application/vnd.gremlin-v2.0+json',
      pingEnabled: false
    });
}

function createGraphTraversalSource(conn) {
  return traversal().withRemote(conn);
}

function connectionString() {
  return 'wss://' +
    process.env['neptuneEndpoint'] +
    ':' +
    process.env['neptunePort'] +
    '/gremlin';
}
A simple demo based on the TinkerPop documentation:
// Assumes `g` has been created from a remote connection as shown above.
const handler = async (event) => {
  // add a person vertex with a property name and value stephen.
  await g.addV('person').property('name', 'stephen').next();
  // fetch all vertices and get the name properties.
  const result = await g.V().values('name').toList();
  console.log(result);
  return {
    statusCode: 201,
    body: JSON.stringify({ message: "Testing Gremlin!", data: result }),
  };
};

Firestore: scheduled export

I have used the code from the Firebase documentation to schedule a backup of the data in my Firestore project to a bucket every 6 hours. See the link and the code here:
https://firebase.google.com/docs/firestore/solutions/schedule-export
const functions = require('firebase-functions');
const firestore = require('@google-cloud/firestore');
const client = new firestore.v1.FirestoreAdminClient();

// Replace BUCKET_NAME
const bucket = 'gs://BUCKET_NAME';

exports.scheduledFirestoreExport = functions.pubsub
  .schedule('every 24 hours')
  .onRun((context) => {
    const projectId = process.env.GCP_PROJECT || process.env.GCLOUD_PROJECT;
    const databaseName = client.databasePath(projectId, '(default)');
    return client.exportDocuments({
      name: databaseName,
      outputUriPrefix: bucket,
      // Leave collectionIds empty to export all collections
      // or set to a list of collection IDs to export,
      // collectionIds: ['users', 'posts']
      collectionIds: []
    })
      .then(responses => {
        const response = responses[0];
        console.log(`Operation Name: ${response['name']}`);
      })
      .catch(err => {
        console.error(err);
        throw new Error('Export operation failed');
      });
  });
Everything works well and my data is saved as I want, but I am nevertheless getting an error:
Error serializing return value: TypeError: Converting circular structure to JSON
Can someone tell me what I should change? I would be glad to get a hint.
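One guess, offered as an assumption rather than a verified fix: Cloud Functions serializes whatever the returned promise resolves to, and the export operation object contains circular references, so explicitly resolving to a plain value may avoid the error. A sketch of the end of the chain (replacing the .then block in the function above):

.then(responses => {
  const response = responses[0];
  console.log(`Operation Name: ${response['name']}`);
  return null; // resolve to a plain, serializable value instead of the operation object
})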

Cloud Functions for Firestore: accessing parent collection data

Many blogs suggest switching to Cloud Firestore because it's easy and well secured. Coming from Realtime Database, back when using Functions + Realtime Database it was easy to navigate through document triggers, like ref.parent.
My setup is like this:
Users
  {userid}
    last_seen: "data"
    {forms}
      {formid}
However, I have added a document trigger with onCreate, and I want to get the value of last_seen:
exports.updateUser = functions.firestore
  .document('users/{userId}/forms/{formid}')
  .onCreate((snap, context) => {
    const newValue = snap.data();
    console.log("test value : " + newValue.test); // works
    console.log("form id: " + context.params.formid); // works
    console.log("user last seen : " + newValue.last_seen); // doesn't work, can't access the parent collection data
  });
I totally get the confusion with the switch to Firestore, but in this case it works almost exactly the same way.
In Realtime Database, you have the snapshot:
exports.doStuff = functions.database.ref('/users/{userId}/forms/{formId}')
  .onCreate((snapshot, context) => {
    const ref = snapshot.ref;
    const userRef = ref.parent.parent;
    userRef.once('value').then(parentSnap => {
      const user = parentSnap.val();
      const lastSeen = user.last_seen;
    });
  });
In Firestore:
exports.doStuff = functions.firestore.document('/users/{userId}/forms/{formId}')
  .onCreate((snapshot, context) => {
    const ref = snapshot.ref;
    const userRef = ref.parent.parent;
    userRef.get().then(parentSnap => {
      const user = parentSnap.data();
      const lastSeen = user.last_seen;
    });
  });
Another thing to consider is that you are passing the userId in your params, so you could just build your own DocumentReference (assuming you're also using firebaseAdmin):
functions.firestore.document('/users/{userId}/forms/{formId}')
  .onCreate((snapshot, context) => {
    const userId = context.params.userId;
    const userRef = firebaseAdmin.firestore().collection('users').doc(userId);
    userRef.get().then(parentSnap => {
      const user = parentSnap.data();
      const lastSeen = user.last_seen;
    });
  });
It also allows you to decouple logic you may use often; consider it a "helper" method. (Note: I switched to async/await by accident; it's a bit cleaner.)
functions.firestore.document('/users/{userId}/forms/{formId}')
  .onCreate(async (snapshot, context) => {
    const userId = context.params.userId;
    const lastSeen = await getLastSeen(userId);
  });

// == Helper Functions ==-------------------
export async function getLastSeen(userId) {
  if (!userId) return Promise.reject('no userId');
  // User Ref
  const userSnap = await firebaseAdmin.firestore().collection('users').doc(userId).get();
  return userSnap.data().last_seen;
}
Now you can use getLastSeen() whenever you need it, and if you make a change you only have to adjust that one function. If it's not something you call often, don't worry about it, but I would also consider a getUser() helper...
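A sketch of that getUser() helper, mirroring getLastSeen() above:

// Fetches the whole user document instead of a single field.
export async function getUser(userId) {
  if (!userId) return Promise.reject('no userId');
  const userSnap = await firebaseAdmin.firestore().collection('users').doc(userId).get();
  return userSnap.data();
}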
In your code, snap is a DocumentSnapshot type object. As you can see from the linked API documentation, there is a ref property on that object that gets you a DocumentReference object pointing to the document that was added. That object has a parent property that gives you a CollectionReference pointing to the collection where the document exists, which in turn also has a parent property. So, use these properties to navigate around your database as needed.
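A short sketch of that navigation inside the OP's onCreate trigger (variable names are illustrative):

exports.updateUser = functions.firestore
  .document('users/{userId}/forms/{formid}')
  .onCreate(async (snap, context) => {
    const formRef = snap.ref;               // DocumentReference of the new form
    const formsCollection = formRef.parent; // CollectionReference 'forms'
    const userRef = formsCollection.parent; // DocumentReference of the user
    const userSnap = await userRef.get();
    console.log("user last seen : " + userSnap.data().last_seen);
  });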
Get the reference where the change took place, move two levels up, and capture the data using get():
exports.updateUser = functions.firestore
  .document('users/{userId}/forms/{formid}')
  .onCreate(async (snap, context) => {
    // Get the reference where the change took place
    // (onCreate passes a DocumentSnapshot directly, so there is no .after here)
    const changeRef = snap.ref;
    // Move to the grandparent level (2 levels up)
    const userIdRef = changeRef.parent.parent;
    // Capture data (Firestore uses get()/data(), not Realtime Database's once()/val())
    const snapshot = await userIdRef.get();
    // Get variable
    const lastSeen = snapshot.data().last_seen;
    // Do your stuff...
    return null;
  });

Parse Server edit Relations on Object very slow

I've got the following function, which works as expected in Parse Server cloud code, but it's painfully slow.
The nested for loops, which internally call queries and save functions, are undoubtedly the root cause.
How can I refactor this code to do some async processing, or better, what methods are there to remove / edit the relations on an object? The documentation around this is very poor.
ClientLabels.applyClientLabels = async (req, res) => {
  const { clients, labels } = req.params;
  const user = req.user;
  const objectIds = clients.map((client) => client.objectId);
  const clientSaveList = [];
  const clientClass = Parse.Object.extend('Clients');
  const query = new Parse.Query(clientClass);
  query.containedIn("objectId", objectIds);
  const queryResult = await query.find({ sessionToken: user.getSessionToken() });
  try {
    for (const client of queryResult) {
      const labelRelation = client.relation('labels');
      const relatedLabels = await labelRelation.query().find({ sessionToken: user.getSessionToken() });
      labelRelation.remove(relatedLabels);
      for (const label of labels) {
        label.className = "ClientLabels";
        const labelRelationObj = Parse.Object.fromJSON(label);
        labelRelation.add(labelRelationObj);
      }
      clientSaveList.push(client);
    }
    const saved = await Parse.Object.saveAll(clientSaveList, { sessionToken: user.getSessionToken() });
    res.success(saved);
  } catch (e) {
    res.error(e);
  }
};
Explanation of some weirdness:
I am having to call Parse.Object.fromJSON in order to make the client-side label object a ParseObjectSubClass and allow operations on it, such as adding relations.
You cannot use include on a relation query as you would with a Pointer, so there needs to be a query for the relations all on its own. An array of pointers was ruled out, as there is going to be an unknown number of labels applied.
There are a few things that can be done:
1. The creation of labels in the inner loop is invariant relative to the outer loop, so it can be done once, at the start.
2. There's no need to query the relation if you're just going to remove the related objects. Use unset() and add to replace the relations.
3. This won't save much computation, but clientSaveList is superfluous; we can just save the query result...
ClientLabels.applyClientLabels = async (req, res) => {
  const { clients, labels } = req.params;
  const objectIds = clients.map((client) => client.objectId);
  const labelObjects = labels.map(label => {
    label.className = "ClientLabels";
    return Parse.Object.fromJSON(label);
  });
  const query = new Parse.Query('Clients');
  query.containedIn("objectId", objectIds);
  const sessionToken = req.user.getSessionToken();
  const queryResult = await query.find({ sessionToken: sessionToken });
  try {
    for (const client of queryResult) {
      client.unset('labels');
      client.relation('labels').add(labelObjects);
    }
    const saved = await Parse.Object.saveAll(queryResult, { sessionToken: sessionToken });
    res.success(saved);
  } catch (e) {
    res.error(e);
  }
};
