Is there a way to save data in Amazon AWS Neptune DB using Node.js?
I am running this code on a Lambda.
I made the connection to the Neptune DB using the code below.
const gremlin = require('gremlin');
const DriverRemoteConnection = gremlin.driver.DriverRemoteConnection;
const Graph = gremlin.structure.Graph;

const dc = new DriverRemoteConnection('endpoint', {});
const graph = new Graph();
const g = graph.traversal().withRemote(dc);
Here's a JavaScript Lambda function that writes data to Neptune (and wraps the write in a retry block in case of concurrent modifications). The function gets the Neptune endpoint and port from environment variables. The write query is in the query() function. It's a simple upsert example that tries to create a vertex using a randomly generated ID; if a vertex with that ID already exists, the query returns that vertex rather than creating a new one.
This example creates a single connection that persists for the lifetime of the Lambda container (rather than being created per invocation). There's some error checking in the retry code that recreates the connection in case of a transient network issue.
const gremlin = require('gremlin');
const async = require('async');

const traversal = gremlin.process.AnonymousTraversalSource.traversal;
const DriverRemoteConnection = gremlin.driver.DriverRemoteConnection;

let conn = createRemoteConnection();
let g = createGraphTraversalSource(conn);

const t = gremlin.process.t;
const __ = gremlin.process.statics;

async function query(id) {
  return g.V(id)
    .fold()
    .coalesce(
      __.unfold(),
      __.addV('User').property(t.id, id)
    )
    .id()
    .next();
}
exports.handler = async (event, context) => {
  const id = Math.floor(Math.random() * 10000).toString();

  return async.retry(
    {
      times: 5,
      interval: 1000,
      errorFilter: function (err) {
        // Add filters here to determine whether an error can be retried
        console.warn('Determining whether retriable error: ' + err.message);

        // Check for connection issues
        if (err.message.startsWith('WebSocket is not open')) {
          console.warn('Reopening connection');
          conn.close();
          conn = createRemoteConnection();
          g = createGraphTraversalSource(conn);
          return true;
        }

        // Check for ConcurrentModificationException
        if (err.message.includes('ConcurrentModificationException')) {
          console.warn('Retrying query because of ConcurrentModificationException');
          return true;
        }

        return false;
      }
    },
    async function (cb) {
      let result = await query(id);
      return result['value'];
    });
};
function createRemoteConnection() {
  return new DriverRemoteConnection(
    connectionString(),
    {
      mimeType: 'application/vnd.gremlin-v2.0+json',
      pingEnabled: false
    });
}

function createGraphTraversalSource(conn) {
  return traversal().withRemote(conn);
}

function connectionString() {
  return 'wss://' +
    process.env['neptuneEndpoint'] +
    ':' +
    process.env['neptunePort'] +
    '/gremlin';
}
A simple demo based on the TinkerPop documentation:
const handler = async (event) => {
  // Add a 'person' vertex with a property 'name' set to 'stephen'.
  await g.addV('person').property('name', 'stephen').next();

  // Fetch all vertices and get their 'name' properties.
  const result = await g.V().values('name').toList();
  console.log(result);

  return {
    statusCode: 201,
    body: JSON.stringify({ message: "Testing Gremlin!", data: result }),
  };
}
Related
I have 2 queries, and corresponding functions, but while writing the resolvers I'm not sure how to store the data from the first function and then reuse it in the second one. Note: I do not want to call the first function again, because it would execute again and it contains an inline API call. I just want to use it like a session or global state in Express.js. Here's the code:
const resolvers = {
  getStudent: async ({ id }) => {
    const resp = await service(id);
    return resp;
  },
  courseDetails: () => {
    console.log(resp); // I want to access the resp object from the function above, but I don't want to call getStudent again
  }
}
I tried context but it didn't work.
You can implement a simple in-memory store.
By storing the Promise and returning it you won't need to worry about multiple requests to the same resources.
const got = require('got');
const assert = require('assert');

function studentServiceFactory(options = {}) {
  const TTL = options.ttl || 1000 * 60 * 5; // default 5 minute TTL (in ms)
  const BASE_API = "https://swapi.dev/api";
  const store = {};

  return {
    get: ({ id }) => {
      if (!store[id] || store[id].timestamp + TTL < Date.now()) {
        // Store the promise (not the resolved value) along with a timestamp
        store[id] = {
          promise: got(`${BASE_API}/people/${id}`),
          timestamp: Date.now(),
        };
        console.log(`${BASE_API}/people/${id}`);
      }
      return store[id].promise;
    }
  }
}

const studentService = studentServiceFactory({ ttl: 1000 });

const resolvers = {
  studentService: studentService,
};

// test program
(async () => {
  const request1 = await resolvers.studentService.get({ id: 1 });
  const request2 = await resolvers.studentService.get({ id: 1 });
  // Both calls return the same stored promise, so the same response comes back.
  assert.equal(request1, request2);

  // Wait for the cached entry to go stale (ttl is 1000 ms here)
  setTimeout(async () => {
    const request3 = await resolvers.studentService.get({ id: 1 });
    assert.notEqual(request1, request3);
  }, 3000);
})();
Two requests are independent of each other. The only way to share data between two requests is to persist the data somewhere. It can be a file, database, etc. In your case, you can simply call the service function again in the other resolver.
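For completeness, a minimal sketch of that second approach, reusing the question's own (hypothetical) service(id) call; the names and returned fields are illustrative only:
const resolvers = {
  getStudent: async ({ id }) => {
    // First resolver: fetch and return the student
    return service(id);
  },
  courseDetails: async ({ id }) => {
    // Second resolver: requests don't share state, so simply call the service again
    const resp = await service(id);
    console.log(resp);
    return resp; // adapt to whatever shape your schema expects
  },
};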
I am using Redis to cache my queries. It's working fine with objects, but not when I get back an array. It gives me the error "Parameter "obj" to Document() must be an object, got kids". It also happens with count queries. Here is my code:
const mongoose = require("mongoose");
const redis = require("redis");
const util = require("util");

const client = redis.createClient(process.env.REDIS_URL);
client.hget = util.promisify(client.hget);
const exec = mongoose.Query.prototype.exec;

mongoose.Query.prototype.cache = async function (options = {}) {
  this.useCache = true;
  this.hashKey = JSON.stringify(options.key || "");
  this.time = JSON.stringify(options.time || 36000);
  return this;
};

mongoose.Query.prototype.exec = async function () {
  if (!this.useCache) {
    return exec.apply(this, arguments);
  }

  const key = JSON.stringify(
    Object.assign({}, this.getQuery(), {
      collection: this.mongooseCollection.name,
    })
  );

  // client.flushdb(function (err, succeeded) {
  //   console.log(succeeded); // will be true if successful
  // });

  const cacheValue = await client.hget(this.hashKey, key);

  if (cacheValue) {
    const doc = JSON.parse(cacheValue);
    /*
      this.model refers to the class of the corresponding Mongoose model of the query being executed, e.g. User, Blog.
      This function must return a promise of Mongoose model objects, because a Mongoose model object has other
      functions attached once it is created (validate, set, get, etc.).
    */
    console.log("Response from Redis");
    console.log(doc);
    console.log(Array.isArray(doc));
    return Array.isArray(doc)
      ? doc.map((d) => new this.model(d))
      : new this.model(doc);
  }

  // Await the results of the query once executed, with any arguments that were passed on.
  const result = await exec.apply(this, arguments);
  client.hset(this.hashKey, key, JSON.stringify(result));
  client.expire(this.hashKey, this.time);
  console.log("Response from MongoDB");
  return result;
};

module.exports = {
  clearHash(hashKey) {
    client.del(JSON.stringify(hashKey));
  },
};
Data in Redis: [ 'kids', 'men', 'women' ]
Query: const collectionType = await Product.find().distinct("collectionType").cache({ key: "COLLECTION_TYPE" });
Can anyone please tell me what I am doing wrong?
I solved it by directly returning the doc, and it's working fine. The results of distinct() (and count()) are plain strings or numbers rather than documents, so hydrating them with new this.model(...) fails with that error. I'm not sure if it is the right way, though: if I directly return doc, I'm just sending the plain data from Redis.
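A minimal sketch of that cache-hit branch, assuming values that aren't plain objects (such as distinct() or count() results) should be returned as-is rather than hydrated:
if (cacheValue) {
  const doc = JSON.parse(cacheValue);

  // Only hydrate into Mongoose documents when the cached value actually looks like a document.
  const hydrate = (d) => (d && typeof d === 'object' ? new this.model(d) : d);

  return Array.isArray(doc) ? doc.map(hydrate) : hydrate(doc);
}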
I want to get the execution ID that GCP assigns to a Cloud Function, in order to store it in a database.
This is what I want to get =>
let currId = log.label.execution_id
To do that, I'm fetching the logs with this function (inside my Cloud Function):
const logging = new Logging();

console.log(`executed ${eId}`);
printEntryMetadata(eId, sId);

async function printEntryMetadata(eId, sId) {
  const options = {
    filter: `textPayload = "executed ${eId}"`
  };
  const [entries] = await logging.getEntries(options);
  console.log('Logs:');
  console.log(`textPayload = "executed ${eId}"`);
  console.log(JSON.stringify(entries));

  const metadata = entries[0].metadata;
  console.log(`${metadata.labels.execution_id}`);
}
But JSON.stringify(entries) returns an empty array, and when I use the filter manually it works...
Is the Cloud Function unable to fetch its own logs?
This is what I've done:
exports.LogMetadata = async (executionId, scopeId, ProjectID, Logging) => {
  const logging = new Logging({ projectId: ProjectID });
  const options = {
    filter: `textPayload = "executed ${executionId}.${scopeId}"`
  };
  const [entries] = await logging.getEntries(options);
  console.log(JSON.stringify(entries));

  try {
    const metadata = entries[0].metadata;
    console.log(`${metadata.labels.execution_id}`);
  } catch (error) {
    console.log("can't find the log, because it's the first function executed...");
  }
}
The only thing that doesn't work is that I can't fetch the log of the first executed function.
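One likely cause is log-ingestion latency: the entry for the very first execution may not be queryable yet when getEntries runs. A minimal sketch of polling with a short delay (illustrative only, not part of the original answer):
async function getEntriesWithRetry(logging, options, attempts = 5, delayMs = 2000) {
  for (let i = 0; i < attempts; i++) {
    const [entries] = await logging.getEntries(options);
    if (entries.length > 0) {
      return entries;
    }
    // Wait before trying again; ingestion of recent log entries can lag by a few seconds.
    await new Promise((resolve) => setTimeout(resolve, delayMs));
  }
  return [];
}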
Is there any way I can get the historian for a particular participant in hyperledger-composer using the Node API?
I am developing an application based on hyperledger-composer using the Node APIs. I want to show the transaction history of a particular participant in his/her profile. I have created the permission.acl for that, and it is working fine in the Playground. But when I access the historian from the Node API it returns the complete historian of the network. I don't know how to filter it for a single participant.
You can return results from REST API calls to the calling client application since v0.20, so something like the following would work (not tested, but you get the idea). NOTE: you could just call a REST API endpoint (e.g. /GET Trader) directly with your parameter, or whatever endpoints you create for your own business network (the example below uses trade-network), rather than using the 'READ-ONLY' transaction processor endpoint described below, which is better suited to returning larger result sets to your client application. See more on this in the docs.
NODE JS Client using APIs:
const BusinessNetworkConnection = require('composer-client').BusinessNetworkConnection;
const rp = require('request-promise');

this.bizNetworkConnection = new BusinessNetworkConnection();
this.cardName = 'admin#mynet';
this.businessNetworkIdentifier = 'mynet';

this.bizNetworkConnection.connect(this.cardName)
  .then((result) => {
    // You can do ANYTHING HERE, e.g.
  })
  .catch((error) => {
    throw error;
  });

// Set up my read-only transaction object - find the history of a particular participant.
// Note: it could equally be an asset instead!
var obj = {
  "$class": "org.example.trading.MyPartHistory",
  "tradeId": "P1"
};

async function callPartHistory() {
  var options = {
    method: 'POST',
    uri: 'http://localhost:3000/api/MyPartHistory',
    body: obj,
    json: true
  };

  let results = await rp(options);
  // console.log("Return value from REST API is " + results);
  console.log(" ");
  console.log(`PARTICIPANT HISTORY for Asset ID: ${results[0].tradeId} is: `);
  console.log("=============================================");
  for (const part of results) {
    console.log(`${part.tradeId} ${part.name}`);
  }
}

// Main
callPartHistory();
MODEL FILE
@commit(false)
@returns(Trader[])
transaction MyPartHistory {
  o String tradeId
}
READ-ONLY TRANSACTION PROCESSOR CODE (in 'logic.js') :
/**
 * Sample read-only transaction
 * @param {org.example.trading.MyPartHistory} tx
 * @returns {org.example.trading.Trader[]} All trxns
 * @transaction
 */
async function participantHistory(tx) {
  const partId = tx.tradeId;
  const nativeSupport = tx.nativeSupport;
  // const partRegistry = await getParticipantRegistry('org.example.trading.Trader')

  const nativeKey = getNativeAPI().createCompositeKey('Asset:org.example.trading.Trader', [partId]);
  const iterator = await getNativeAPI().getHistoryForKey(nativeKey);

  let results = [];
  let res = { done: false };

  while (!res.done) {
    res = await iterator.next();
    if (res && res.value && res.value.value) {
      let val = res.value.value.toString('utf8');
      if (val.length > 0) {
        console.log("@debug val is " + val);
        results.push(JSON.parse(val));
      }
    }
    if (res && res.done) {
      try {
        iterator.close();
      } catch (err) {
      }
    }
  }

  var newArray = [];
  for (const item of results) {
    newArray.push(getSerializer().fromJSON(item));
  }
  console.log("@debug the results to be returned are as follows: ");
  return newArray; // returns something to my Node.js client (called via REST API)
}
We're working on a cloud function that keeps our BigQuery and Firebase databases in sync. The function triggers when a place is created/updated/deleted.
Based on the trigger action (create/update/delete) we add a property called big_query_active to signal whether the object still exists, and we do the same for the date.
Our current problem is that the call to BigQuery sometimes returns an error, which means the data is no longer in sync. How can this be prevented?
'use strict';

// Default imports.
const functions = require('firebase-functions');
const bigQuery = require('@google-cloud/bigquery');

// If you want to change the nodes to listen to, REMEMBER TO change the constants below.
// The 'id' field is AUTOMATICALLY added to the values, so you CANNOT add it.
const ROOT_NODE = 'places';
const VALUES = [
  'country_id',
  'category_id',
  'name',
  'active',
  'archived'
];

// This function listens to the supplied root node for child added/removed/changed events.
// When an object is inserted/deleted/updated the appropriate action will be taken.
exports.children = functions.database.ref(ROOT_NODE + '/{id}').onWrite(event => {
  const query = bigQuery();
  const dataset = query.dataset('stampwallet');
  const table = dataset.table(ROOT_NODE);

  if (!event.data.exists() && !event.data.previous.exists()) {
    return;
  }

  const item = event.data.exists() ? event.data.val() : event.data.previous.val();
  const data = {};
  data['id'] = event.params.id;
  for (let index = 0; index < VALUES.length; index++) {
    const key = VALUES[index];
    data[key] = item[key] !== undefined ? item[key] : null;
  }
  data['big_query_date'] = new Date().getTime() / 1000;
  data['big_query_active'] = event.data.exists();

  return table.insert(data).then(() => {
    return true;
  }).catch((error) => {
    if (error.name === 'PartialFailureError') {
      console.log('A PartialFailureError happened while uploading to BigQuery...');
    } else {
      console.log(JSON.stringify(error));
      console.log('Random error happened while uploading to BigQuery...');
    }
  });
});
This is the error that we (sometimes) receive
{"code":"ECONNRESET","errno":"ECONNRESET","syscall":"read"}
How can we prevent the data from going out of sync? Or is there a way to retry so that the insert always succeeds?
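One option, along the lines of the Neptune retry example earlier in this thread, would be to wrap the insert in a retry that only repeats on transient network errors such as ECONNRESET. A minimal sketch using the async library (illustrative, not the original code; the helper name is made up):
const async = require('async');

function insertWithRetry(table, data) {
  return async.retry(
    {
      times: 5,
      interval: 1000,
      errorFilter: (err) => {
        // Only retry transient connection resets; other errors are surfaced immediately.
        return err.code === 'ECONNRESET';
      }
    },
    async () => {
      await table.insert(data);
    });
}

// Inside the trigger, return insertWithRetry(table, data).catch(...) instead of calling table.insert(data) directly.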