In my electron application I create Diffie-Hellman keys via the following method:
const crypto = require('crypto');
/**
 * Generate a Diffie-Hellman key-agreement object and its key pair.
 *
 * NOTE: crypto.createDiffieHellman() and generateKeys() are synchronous,
 * CPU-bound calls. The original code awaited them, but awaiting a
 * non-promise does nothing — this function still blocks the event loop
 * while the prime is generated. The `async` keyword is kept so the
 * Promise-returning interface seen by existing callers is unchanged.
 *
 * @param {Integer|Buffer} p The prime (or prime length in bits) for Diffie-Hellman key generation
 * @param {Integer} g The generator for the Diffie-Hellman key exchange
 * @param {Function} callback Receives { dh, keys }; its return value resolves the promise
 * @returns {Promise<*>} whatever the callback returns
 */
async function createSelfKey(p, g, callback) {
  // Fall back to generating a fresh 2048-bit group when no prime/generator
  // pair is supplied — this is the expensive path that freezes the UI.
  const dh = (p && g)
    ? crypto.createDiffieHellman(p, g)
    : crypto.createDiffieHellman(2048);
  const returnVal = { dh, keys: dh.generateKeys() };
  return callback(returnVal);
}
But the key generation is a computation-heavy process, so it makes my application freeze. An example of usage is when I try to implement the method generateCreatorKeys in the following function:
function ChatRoomStatus() {
  /**
   * Map of chatroom name -> chatroom state (each holding a `friends` list).
   * @var {Object}
   */
  const chatrooms = {};
  // Some other logic
  /**
   * Fetches the creator of a chatroom and executes a callback on it.
   * @param {String} chatroom The chatroom whose creator is fetched
   * @param {Function} callback Invoked with (creator, index, chatroom state)
   */
  this.processCreator = (chatroom, callback) => {
    // Native Array.prototype.findIndex replaces `_.findIndex`: lodash is not
    // imported in the code shown, and the native call behaves identically
    // here (returns -1 when no friend is flagged as creator).
    const index = chatrooms[chatroom].friends.findIndex((friend) => friend.creator);
    return callback(chatrooms[chatroom].friends[index], index, chatrooms[chatroom]);
  };
  /**
   * Generates Diffie-Hellman keys for the chatroom's creator and stores them
   * on the creator's `encryption` property.
   * @param {String} chatroom The chatroom whose creator gets keys
   * @param {Function} callback Invoked with (creator, index, chatroom state)
   */
  this.generateCreatorKeys = (chatroom, callback) => {
    return this.processCreator(chatroom, (friend, index, chatroomState) => {
      return createSelfKey(null, null, (cryptoValues) => {
        friend.encryption = cryptoValues;
        return callback(friend, index, chatroomState);
      });
    });
  };
}
An example that this method is called is:
// Scoped npm packages start with '@'; the specifier had been garbled to
// '#xmpp/client' by the copy/paste (the same artifact turned @param into
// #param throughout the post).
const { xml, jid } = require('@xmpp/client');
/**
 * Handles the message exchange for the group key agreement.
 * @param {Function} sendMessageCallback Sends an already-built XMPP stanza
 * @param {ChatRoomStatus} ChatroomWithParticipants Holder of chatroom state
 */
function GroupKeyAgreement(sendMessageCallback, ChatroomWithParticipants) {
  const self = this;
  /**
   * Sends the chatroom creator's public DH parameters into the chatroom,
   * addressed to the creator's own occupant JID.
   * @param {String} chatroomJid Full JID of the chatroom
   * @param {String} chatroomName Name the chatroom is registered under
   */
  self.sendSelfKeys = (chatroomJid, chatroomName) => {
    ChatroomWithParticipants.generateCreatorKeys(chatroomName, (creator) => {
      const message = xml('message', { to: `${jid(chatroomJid).bare().toString()}/${creator.nick}` });
      const extension = xml('x', { xmlns: 'http://pcmagas.tk/gkePlusp#intiator_key' });
      // Prime, generator and public key travel as hex strings inside the stanza.
      extension.append(xml('p', {}, creator.encryption.dh.getPrime().toString('hex')));
      extension.append(xml('g', {}, creator.encryption.dh.getGenerator().toString('hex')));
      extension.append(xml('pubKey', {}, creator.encryption.keys.toString('hex')));
      message.append(extension);
      sendMessageCallback(message);
    });
  };
}
module.exports = GroupKeyAgreement;
Do you know how I can run the function createSelfKey in a parallel/separate thread and serve its contents via a callback? Also, the code above runs on Electron's main process, so a freeze in it stalls the whole application for a while.
I'd take a look at https://electronjs.org/docs/tutorial/multithreading.
Electron has basically everything from the DOM and node.js plus more in it, so you have a few options. In general, they are:
Web workers (renderer process only). If you're doing this in a renderer process, you can just use plain DOM web workers. Those are run in a separate process or thread (not sure which, that's a chromium implementation detail, but it definitely won't block your UI).
It looks like node.js worker_threads (renderer process only?) are also available now in Electron. That might work as well, never used these personally.
You can always create another renderer process and use that as your separate "thread" and communicate with it via IPC. When the work is done, you just close that. You do this by creating a new, hidden BrowserWindow.
Use node.js' cluster/child_process module to spin up a new node process, and use it's built-in IPC (not Electron's) to communicate with it.
Because you're running this code in the main process and assuming you can't move it out, your only option (to my knowledge) is #3. If you're okay with adding a library, electron-remote (https://github.com/electron-userland/electron-remote#the-renderer-taskpool) has some cool functionality that lets you spin up a renderer process (or several) in the background, get the results as a promise, and then closes them for you.
The best solution I tried to your problem is the following code based upon answer:
const crypto = require('crypto');
const spawn = require('threads').spawn;
/**
 * Generate the keys and the Diffie-Hellman key-agreement object off the main
 * thread, using the `threads` npm package, so the expensive prime generation
 * no longer freezes Electron's main process.
 * @param {Integer} p The prime for Diffie-Hellman key generation
 * @param {Integer} g The generator for the Diffie-Hellman key exchange
 * @param {Function} callback Receives (diffieHellmanObject, pubKey, signaturePubKey)
 */
const createSelfKey = (p, g, callback) => {
// The spawned function runs in an isolated worker: it cannot close over
// outer variables, so `crypto` must be required again inside it.
const thread = spawn(function(input, done) {
const cryptot = require('crypto');
console.log(input);
const pVal = input.p;
const gVal = input.g;
let dh = null;
if (pVal && gVal) {
dh = cryptot.createDiffieHellman(pVal, gVal);
} else {
dh = cryptot.createDiffieHellman(2048);
}
const pubKey = dh.generateKeys();
// NOTE(review): generateKeys() is called a second time here, which
// regenerates the key pair — `pubKey` above then no longer matches the
// worker's final private key. Confirm this double call is intentional.
const signaturePubKey = dh.generateKeys();
// Only serialisable data can cross the thread boundary, so the DH object
// itself stays in the worker; prime and generator travel as hex strings.
done({ prime: dh.getPrime().toString('hex'), generator: dh.getGenerator().toString('hex'), pubKey, signaturePubKey});
});
return thread.send({p,g}).on('message', (response) => {
// Rebuild an equivalent DiffieHellman object on this side from the
// transmitted parameters.
// NOTE(review): the hex strings are passed without an encoding argument —
// crypto.createDiffieHellman(prime, 'hex', generator, 'hex') may be
// required; verify against the Node crypto documentation.
callback( crypto.createDiffieHellman(response.prime, response.generator), response.pubKey, response.signaturePubKey);
thread.kill();
}).on('error', (err)=>{
console.error(err);
}).on('exit', function() {
console.log('Worker has been terminated.');
});
};
As you can see, using the threads library from npm will provide what you need. The only drawback of this approach is that you cannot pass the in-thread generated objects outside the thread's scope. Also, the code inside the function executed on the thread is isolated, so you may need to re-require any library you need, as you can see above.
Related
I have this code where I fetch a list of elements so I can make discordjs options with the mongoose data
const items = require(‘./schema.js’)
const items1 = await items.find()
module.exports = {
name: `buy`,
timeout:15000,
/**
* #param {Client} client
* #param {Message} message
* #param {String[]} args
*/
data: new SlashCommandBuilder()
.setName('buy')
.setDescription('buy an item!')
.addStringOption(option =>
option
.setName('itemid')
.setDescription('the item you want to buy')
.setRequired(true)
/** .addChoices(
items1.forEach(item => {
option.addChoice(`<:${item.EmojiName}:${item.EmojiID}>${item.ItemName} - <:bobux:809551218217058354>${item.Price} `,`${item.ItemID}` )
})
)
*/
),
When I do this, I get the error that you can’t use await outside of async. Does anyone have any solutions/alternatives to this code?
Thanks
The situation you're facing is more like a bad-design symptom than a limitation. What you are actually trying to do is "generating Discord commands — asynchronously — based on Mongoose data".
Well, just do that in the code. Don't try to mix and export synchronously an asynchronously generated thing. Rather simply make a function that generates it :
const Item = require('./schema.js')
// Builds the display label for a single shop item choice.
function itemToChoice(item) {
  return `<:${item.EmojiName}:${item.EmojiID}>${item.ItemName} - <:bobux:809551218217058354>${item.Price} `;
}
// Registers every item as a choice on the given option builder and returns
// the builder for further chaining.
function addItemsChoices(option, items) {
  let builder = option;
  for (const item of items) {
    builder = builder.addChoice(itemToChoice(item), `${item.ItemID}`);
  }
  return builder;
}
module.exports = async () => {
const items = await Item.find()
const command = {
name: `buy`,
timeout:15000,
/**
* #param {Client} client
* #param {Message} message
* #param {String[]} args
*/
data: new SlashCommandBuilder()
.setName('buy')
.setDescription('buy an item!')
.addStringOption(option => (
addItemsChoices(
option
.setName('itemid')
.setDescription('the item you want to buy')
.setRequired(true),
items
)
))
}
return command
}
PS: for god sake, format your code
PS2: for god sake, remove smart punctuation on your computer
I have different publishers publish to a PubSub Topic. Each message has a specific key. I would like to create subscribers that only pick up the latest message for each specific key within a defined interval. In other words, I would like to have some kind of debounce implemented for my subscribers.
Example (with debounce 2 seconds)
-(x)-(y)-(x)-------(z)-(z)---(x)-----------------> [Topic with messages]
|-------|---------------|execute for x [Subscriber]
2 seconds
|---------------|execute for y [Subscriber]
2 seconds
|---|---------------|execute for z [Subscriber]
2 seconds
|---------------|execute for x [Subscriber]
2 seconds
Ordered Execution Summary:
execute for message with key: y
execute for message with key: x
execute for message with key: z
execute for message with key: x
Implementation
// index.ts
import * as pubsub from '#google-cloud/pubsub';
import * as functions from 'firebase-functions';
import AbortController from 'node-abort-controller';
// Cloud Function that debounces messages per `key`: after a message arrives
// it waits 2 seconds, and if another message with the same key is observed
// during that window the work is meant to be skipped (aborted).
exports.Debouncer = functions
.runWith({
// runtimeOptions
})
.region('REGION')
.pubsub.topic('TOPIC_NAME')
.onPublish(async (message, context) => {
const key = message.json.key;
// when an equivalent topic is being received, cancel this calculation:
const aborter = await abortHelper<any>(
'TOPIC_NAME',
(message) => message?.key === key
).catch((error) => {
console.error('Failed to init abort helper', error);
throw new Error('Failed to init abort helper');
});
// The debounce window.
await new Promise((resolve) => setTimeout(resolve, 2000));
// here, run the EXECUTION for the key, unless an abort signal from the
// abortHelper was received, e.g.:
// if (aborter.abortController.signal.aborted) { return; }
// (checking `.signal` alone would always be truthy)
aborter.teardown();
/**
* Subscribe to the first subscription found for the specified topic. Once a
* message gets received that is matching `messageMatcher`, the returned
* AbortController reflects the aborted state. Calling the returned teardown
* will cancel the subscription.
*
* NOTE(review): picking subscriptionsResponse[0][0] means every function
* instance shares one subscription; Pub/Sub load-balances messages across
* subscribers of the same subscription, so a given instance may never see
* the matching message. This looks like a plausible cause of the flaky
* production behaviour described below — verify.
*/
async function abortHelper<TMessage>(
topicName: string,
messageMatcher: (message: TMessage) => boolean = () => true
) {
const abortController = new AbortController();
const pubSubClient = new pubsub.PubSub();
const topic = pubSubClient.topic(topicName);
const subscription = await topic
.getSubscriptions()
.then((subscriptionsResponse) => {
// TODO use better approach to find or provide subscription
const subscription = subscriptionsResponse?.[0]?.[0];
if (!subscription) {
throw new Error('no found subscription');
}
return subscription;
});
// Flip the abort flag and detach as soon as a matching message is seen.
const listener = (message: TMessage) => {
const matching = messageMatcher(message);
if (matching) {
abortController.abort();
unsubscribeFromPubSubTopicSubscription();
}
};
subscription.addListener('message', listener);
return {
teardown: () => {
unsubscribeFromPubSubTopicSubscription();
},
abortController,
};
function unsubscribeFromPubSubTopicSubscription() {
subscription.removeListener('message', listener);
}
}
});
The initial idea was to register a cloud function to the topic. This cloud function itself then subscribes to the topic as well and waits for the defined interval. If it picks up a message with the same key during the interval, it exits the cloud function. Otherwise, it runs the execution.
Running inside the firebase-emulator this worked fine. However, on production random and hard to debug issues occurred most likely due to parallel execution of the functions.
What would be the best approach to implement such a system in a scalable way? (It does not necessarily have to be with PubSub.)
I set up a simple Backendless API Service and am running it through CodeRunner. As a test, I'm simply getting a record from the database and returning it. I've tried every combination of return type definition in the class annotations that I can think of, and I've assured that the correct record exists and is being returned to the service, but I've never successfully had the record returned using the console, or via a SDK invocation. In every case, the body returned to the invocation is null. My current test uses "Object" as the return type for the getSchedule call - are database objects not objects?
Here is the entire service:
'use strict';
const { DateTime } = require("luxon");
const util = require("util");
class Scheduling {
/**
* Fetch the schedule record for the given ISO day.
*
* NOTE(review): the promise produced by find(q) below is neither returned
* nor awaited, so getSchedule itself always returns undefined — the service
* invocation completes (HTTP 200, null body) before the query resolves.
* The inner `return rec[0]` only resolves the .then() chain, which nothing
* consumes. This is why the console sees the correct record while the
* caller receives null (see the accepted answer below).
*
* @param {String} day
* @returns {Object}
*/
getSchedule( day ) {
let t = DateTime.fromISO(day).toMillis();
let q = Backendless.DataQueryBuilder.create().setWhereClause(`day = ${t}`);
Backendless.Data.of("schedules").find(q)
.then(rec => {
console.log(util.inspect(rec,3))
if (rec.length === 1) {
return rec[0]
}
else {
return {error: 404, msg: 'not found'}
}
})
}
}
Backendless.ServerCode.addService( Scheduling )
The "inspect" call indicates I am retrieving the correct record. No errors, the return status of the invocation is always 200. Obviously, I'm missing something about API service return types, please point me in the correct direction.
The problem is the response for the find method is returned after the invocation of getSchedule is complete (because the API invocation is asynchronous).
How about declaring the getSchedule with async and then await for the API invocation?
'use strict';
const { DateTime } = require("luxon");
const util = require("util");
class Scheduling {
  /**
   * Look up the schedule record for a given ISO day. Declared async so the
   * CodeRunner invocation waits for the database query to resolve before
   * the response body is produced.
   * @param {String} day - ISO date string of the requested schedule
   * @returns {Object} the matching record, or an error descriptor when absent
   */
  async getSchedule( day ) {
    const millis = DateTime.fromISO(day).toMillis();
    const queryBuilder = Backendless.DataQueryBuilder.create().setWhereClause(`day = ${millis}`);
    const records = await Backendless.Data.of("schedules").find(queryBuilder);
    console.log(util.inspect(records, 3))
    return records.length === 1 ? records[0] : { error: 404, msg: 'not found' };
  }
}
Backendless.ServerCode.addService( Scheduling )
I have several repos with methods and some of these methods use transaction (.tx).
For example, in my DevicesRepository below, the 'add' method has to insert a new Device, which means:
1. Insert a System and return the ID (SystemsRepository)
2. insert the device with the returned systemId and get the new id
3. Insert other pieces (other repos) that uses the deviceId
My problem is that in that transaction I don't know how to access to the other repo methods.
I could use the other repos from my Database object (Database.systems.add, Database.OtherRepo.add, [...]), but if I do that
tx doc
When invoked on the root Database object, the method allocates the connection from the pool, executes the callback, and once finished - releases the connection back to the pool. However, when invoked inside another task or transaction, the method reuses the parent connection.
task doc
When executing more than one request at a time, one should allocate and release the connection only once, while executing all the required queries within the same connection session. More importantly, a transaction can only work within a single connection.
Thanks! :)
P.S : I can add how I initialize the DB and repos
./db/repos/devices.js
'use strict';
var Database = null, pgp = null, Collections = null;
/**
 * Inserts a new device and its dependent records inside one transaction:
 * first the system (to obtain systemid), then the device row, then the
 * other dependent repos that need the new deviceId.
 * @param {Object} params Request payload; params.data.device is the row to insert.
 * @returns {Promise<Object>} the device, enriched with the generated ids.
 */
async function add(params) {
    // Database.tx allocates a single connection; repos reached through the
    // `transaction` context reuse that connection, which is required for all
    // statements to belong to the same transaction.
    return Database.tx('Insert-New-Device', async function(transaction) {
        const device = params.data.device;
        const system = await transaction.systems.add(params);
        device.systemid = system.systemId;
        // Must be `let`: the original `const query` followed by `query += ...`
        // threw "TypeError: Assignment to constant variable" at runtime.
        let query = pgp.helpers.insert(device, Collections.insert);
        query += " RETURNING deviceId";
        device.deviceId = await transaction.one(query);
        const otherRepoInsert = await transaction.otherRepos.add(params);
        device.otherRepos.id = otherRepoInsert.otherReposId;
        return device;
    })
    // The no-op `.then(data => { return data; })` was removed; rejections
    // are still wrapped exactly as before.
    .catch(ex => { throw new Error(ex); });
}
function createColumnsets() { /* hidden for brevity (almost the same as the pg-promise-demo */ }
// Public repo API for devices.
const DevicesRepository = {
add: add
};
// Repo initializer, invoked from the pg-promise `extend` event with the
// current context.
// NOTE(review): db.$config exists only on the root Database object, not on
// task/transaction contexts — reading db.$config.pgp here fails when the
// repo is re-extended inside a transaction (this is the bug the author
// fixes later in the thread by passing pgp explicitly).
module.exports = (db) => {
Database = db;
pgp = db.$config.pgp;
Collections = createColumnsets();
return DevicesRepository;
}
./db/repos/systems.js
'use strict';
var Database = null, pgp = null, Collections = null;
/**
 * Inserts a new system row, optionally returning the inserted record(s).
 * @param {Object} params Request payload; params.data.system is the row to insert.
 * @returns {Promise<*>} the INSERT result (all rows when params.return is set).
 */
async function add(params) {
    const system = params.data.system;
    // Fixed architecture / distribution ids, exactly as before.
    system.archid = 2;
    system.distributionid = 3;
    const insertSql = pgp.helpers.insert(system, Collections.insert);
    const query = params.return ? insertSql + " RETURNING *" : insertSql;
    try {
        return await Database.any(query);
    } catch (ex) {
        throw new Error(ex);
    }
}
function createColumnsets() { /* hidden for brevity (almost the same as the pg-promise-demo */ }
// Public repo API for systems.
const SystemsRepository = {
add: add
};
// Repo initializer, invoked from the pg-promise `extend` event with the
// current context.
// NOTE(review): db.$config exists only on the root Database object — on a
// task/transaction context this read of db.$config.pgp is undefined, which
// is the root cause the author identifies later in the thread.
module.exports = (db) => {
Database = db;
pgp = db.$config.pgp;
Collections = createColumnsets();
return SystemsRepository;
}
I found the real problem.
If you go to my first post, you can see that each of my repo exports an initialization function :
1. which is called by the pg-promise 'extend' event
2. which takes one param : the context
3. which uses this param to initialize the 'pgp' variable in the repo with db.$config.pgp
As explained in the demo, this event occurs when the db is loaded for the first time in the appl and for every task and transaction.
In my case :
The first time the event occurs (full app initialization), the event's param 'obj' is the database context (containing $config, $pool, ...) so it works
When the event occurs for a task or transaction, the event's param 'obj' is a Task context, where $config does not exist, so the event cannot extend the context with my repo. An exception 'cannot read property helpers of undefined' is thrown but does not appear and does not crash my app — I don't know why; it is perhaps caught inside the event. That is why I could not use my repo in the transaction.
I modified my code like it and it works :
./db/index.js
'use strict';
/* hidden for brevity */
// pg-promise initialization options:
const initOptions = {
promiseLib: promise,
// `extend` fires for the root Database object AND again for every
// task/transaction context, so each context gets repo instances bound to it.
extend(obj, dc) {
// `pgp` is passed in explicitly: task/transaction contexts have no
// $config property, so repos must not read obj.$config.pgp themselves.
obj.roles = repos.Roles(obj, pgp);
obj.shells = repos.Shells(obj, pgp);
obj.systems = repos.Systems(obj, pgp);
obj.devices = repos.Devices(obj, pgp);
}
};
const pgp = require('pg-promise')(initOptions);
const db = pgp(config);
/* hidden for brevity */
./db/index.js
'use strict';
/* hidden for brevity */
// pg-promise initialization options:
const initOptions = {
promiseLib: promise,
// `extend` runs for the root Database object and for every task/transaction
// context; the library `pgp` reference is handed to each repo factory
// because $config is not available on task/transaction contexts.
extend(obj, dc) {
obj.roles = repos.Roles(obj, pgp);
obj.shells = repos.Shells(obj, pgp);
obj.systems = repos.Systems(obj, pgp);
obj.devices = repos.Devices(obj, pgp);
}
};
const pgp = require('pg-promise')(initOptions);
const db = pgp(config);
/* hidden for brevity */
./db/repos/{repoFiles}.js
/* hidden for brevity */
// Repo factory: now receives the pg-promise library explicitly instead of
// reading db.$config.pgp (which is undefined on task/transaction contexts).
module.exports = (db, pgpLib) => {
Database = db;
pgp = pgpLib;
Collections = createColumnsets();
return DevicesRepository;
}
Property $config is there for integration purposes. That's why it exists only on the root Database level, and not inside tasks or transactions.
In order to make use of the helpers namespace, you should pass pgp into repositories when you initialize them, as shown within pg-promise-demo:
extend(obj, dc) {
obj.users = new repos.Users(obj, pgp);
obj.products = new repos.Products(obj, pgp);
}
You can establish the transaction outside the calls, then pass it in to those functions to have them use it.
That said, I'd recommend looking into a slightly higher-level query builder library such as Knex.js to save you from some of these (and future) headaches.
I have just moved from V0.13.2 to V0.14.2 and am now getting following error in event processing. The error appears to be in the composer-client code, not mine. Any ideas on resolving? Events are still being posted and processed by my app, so my code still appears to work, but the presence of these error messages is troubling and their volume is overwhelming my console window.
error: [EventHub.js]: on.data - Error unmarshalling transaction= TypeError: Cannot read property 'getSerializer' of null
at events.forEach (<path>Z2B_Master/Chapter12/node_modules/composer-client/lib/businessnetworkconnection.js:483:73)
at Array.forEach (native)
at HLFConnection.connection.on (<path>Z2B_Master/Chapter12/node_modules/composer-client/lib/businessnetworkconnection.js:482:29)
at emitOne (events.js:96:13)
at HLFConnection.emit (events.js:188:7)
at ChainCodeCBE.ccEvent.eventHubs.(anonymous function).registerChaincodeEvent [as onEvent] (<path>Z2B_Master/Chapter12/node_modules/composer-connector-hlfv1/lib/hlfconnection.js:231:22)
at <path>Z2B_Master/Chapter12/node_modules/fabric-client/lib/EventHub.js:810:12
at Set.forEach (native)
at EventHub._processChainCodeOnEvents (<path>Z2B_Master/Chapter12/node_modules/fabric-client/lib/EventHub.js:808:14)
at ClientDuplexStream.<anonymous> (<path>Z2B_Master/Chapter12/node_modules/fabric-client/lib/EventHub.js:311:10)
This error is not present using identical code in V0.13.
All events are emitted via a single function in the sample.js file. A transaction calls the function as in the following example:
/**
 * create an order to purchase
 * @param {org.acme.Z2BTestNetwork.CreateOrder} purchase - the order to be processed
 * @transaction
 */
function CreateOrder(purchase) {
    purchase.order.buyer = purchase.buyer;
    purchase.order.amount = purchase.amount;
    purchase.order.financeCo = purchase.financeCo;
    purchase.order.created = new Date().toISOString();
    purchase.order.status = JSON.stringify(orderStatus.Created);
    return getAssetRegistry('org.acme.Z2BTestNetwork.Order')
        .then(function (assetRegistry) {
            // Emit the 'Created' event only after the registry update succeeds.
            return assetRegistry.update(purchase.order)
                .then(function (_res) {
                    z2bEmit('Created', purchase.order);
                    return _res;
                });
            // The previous `.catch(function(error){return(error);})` swallowed
            // failures: returning the error FULFILLED the promise, so a failed
            // update still looked like a successful transaction. Rejections now
            // propagate so Composer can roll the transaction back.
        });
}
Each transaction calls the z2bEmit function with a unique _event string.
/**
 * Builds and emits a business-network event describing a change to an order.
 * @param {String} _event Name of the event type defined in the .cto file
 * @param {Object} _order The order asset the event refers to
 */
function z2bEmit(_event, _order)
{
    var method = 'z2bEmit';
    // Create the event through the factory, stamp it with the identifiers
    // the browser-side monitor needs, then emit it to all listeners.
    var outboundEvent = getFactory().newEvent(ns, _event);
    outboundEvent.orderID = _order.$identifier;
    outboundEvent.buyerID = _order.buyer.$identifier;
    emit(outboundEvent);
    return;
}
_order is a defined asset in the cto file, _event is a defined event in the cto file.
The client side code has a single routine, executed once, to set up monitoring:
/**
 * Register a single business-network event listener for all Z2B events and
 * forward every received event to _monitor for browser notification.
 * @param {express.req} req - the inbound request object from the client
 * @param {express.res} res - the outbound response object for communicating back to client
 * @param {express.next} next - an express service to enable post processing prior to responding to the client
 */
exports.init_z2bEvents = function (req, res, next)
{
var method = 'init_z2bEvents';
// Guard so the listener is only registered once per server lifetime.
// NOTE(review): bRegistered is flipped before connect() succeeds; if the
// connection fails, later calls still answer 'Already Registered' —
// confirm this is intended.
if (bRegistered) {res.send('Already Registered');}
else{
bRegistered = true;
// NOTE(review): _conn is created here but never referenced again in this
// function — presumably createAlertSocket() has side effects; verify.
let _conn = svc.createAlertSocket();
let businessNetworkConnection;
businessNetworkConnection = new BusinessNetworkConnection();
// following line added to deal with eventListener error message that more eventListeners needed to be added
businessNetworkConnection.setMaxListeners(50);
return businessNetworkConnection.connect(config.composer.connectionProfile, config.composer.network, config.composer.adminID, config.composer.adminPW)
.then(() => {
// Every emitted network event is handed to _monitor together with the
// general-alert and finance-alert web sockets.
businessNetworkConnection.on('event', (event) => {_monitor(svc.al_connection, svc.f_connection, event); });
res.send('event registration complete');
}).catch((error) => {
console.log(method+' business network connection failed'+error.message);
res.send(method+' business network connection failed'+error.message);
});
}
}
The connectionProfile is 'hlfv1'
and a single monitor routine, which figures out what kind of event has been posted and then uses a web socket to send that info to a browser so that an alert icon can be posted or updated. A shortened version of that function follows. _conn _f_conn continue to work correctly. The _event information is being passed in and continues to parse correctly. The eventhub.js messages appear on every alert, irrespective of how long the program runs.
/**
 * _monitor
 * Relays a received business-network event to the connected browsers so an
 * alert icon can be shown or refreshed for each interested party.
 * @param {web.socket} _conn - web socket connection for general alerts
 * @param {web.socket} _f_conn - web socket for finance alerts
 * @param {org.acme.z2bNetwork.Event} _event - the event just emitted
 *
 */
function _monitor(_conn, _f_conn, _event)
{
    var method = '_monitor';
    console.log(method + ' _event received: ' + _event.$type + ' for Order: ' + _event.orderID);
    // Serialise one alert per recipient; each call snapshots its own payload.
    var notify = function (socket, recipientId) {
        socket.sendUTF(JSON.stringify({ type: _event.$type, orderID: _event.orderID, ID: recipientId }));
    };
    // Every event type produces a buyer-addressed general alert first.
    notify(_conn, _event.buyerID);
    var kind = _event.$type;
    if (kind === 'Bought' || kind === 'PaymentRequested') {
        // The seller gets a general alert; the finance company a finance alert.
        notify(_conn, _event.sellerID);
        notify(_f_conn, _event.financeCoID);
    } else if (kind === 'Ordered' || kind === 'Cancelled' || kind === 'Backordered') {
        // Both the seller and the provider get general alerts.
        notify(_conn, _event.sellerID);
        notify(_conn, _event.providerID);
    }
    // 'Created' and unknown event types need no additional notifications.
}
While unable to determine the root cause of this problem, it has gone away with the release of (and upgrade to) hyperledger-composer V0.15.2.