react-beautiful-dnd and MySQL state management - Node.js

okay. I'm confused as to the best way to do this:
The following pieces are in play: a Node.js server, a client-side React app (with Redux), and a MySQL DB.
In the client app I have lists (many, but for this issue assume one) that I want to be able to reorder by drag and drop.
In the MySQL DB the items are stored to represent a linked list (with a nextKey, lastKey, and productionKey (primary key), along with the data fields):
//mysql columns [productionKey, lastKey, nextKey, ...(other data)]
The current issue I'm having is a render issue: it stutters after every change.
I'm using these two functions to get the initial order and to reorder:
function SortLinkedList(linkedList) {
  var sortedList = [];
  var map = new Map();
  var currentID = null;
  for (var i = 0; i < linkedList.length; i++) {
    var item = linkedList[i];
    if (item?.lastKey === null) {
      // head of the list: it has no previous item
      currentID = item?.productionKey;
      sortedList.push(item);
    } else {
      // map each item's predecessor key to its index so the chain can be walked
      map.set(item?.lastKey, i);
    }
  }
  while (sortedList.length < linkedList.length) {
    var nextItem = linkedList[map.get(currentID)];
    sortedList.push(nextItem);
    currentID = nextItem?.productionKey;
  }
  // undefined entries appear because the server has not fully updated yet, so the linked list is broken;
  // nothing will render without this filter
  const filteredSafe = sortedList.filter(x => x !== undefined);
  return filteredSafe;
}
const reorder = (list, startIndex, endIndex) => {
  const result = Array.from(list);
  const [removed] = result.splice(startIndex, 1);
  result.splice(endIndex, 0, removed);
  const adjustedResult = result.map((x, i, arr) => {
    if (i === 0) {
      x.lastKey = null;
    } else {
      x.lastKey = arr[i - 1].productionKey;
    }
    if (i === arr.length - 1) {
      x.nextKey = null;
    } else {
      x.nextKey = arr[i + 1].productionKey;
    }
    return x;
  });
  return adjustedResult;
};
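Note that Array.from only copies the array, not the item objects inside it, so the .map above rewrites lastKey/nextKey on the same objects that live in your Redux store. That in-place mutation may or may not be related to the stutter, but a non-mutating variant (just a sketch with the same re-linking logic) avoids it:

const reorderImmutable = (list, startIndex, endIndex) => {
  const result = Array.from(list);
  const [removed] = result.splice(startIndex, 1);
  result.splice(endIndex, 0, removed);
  // copy each item before re-linking so the originals in the store stay untouched
  return result.map((x, i, arr) => ({
    ...x,
    lastKey: i === 0 ? null : arr[i - 1].productionKey,
    nextKey: i === arr.length - 1 ? null : arr[i + 1].productionKey,
  }));
};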
I've got this function to get the items
const getItems = (list, jobList) => {
  return list.map((x, i) => {
    const jobName = jobList.find(y => y.jobsessionkey == x.attachedJobKey)?.JobName;
    return {
      id: `ProductionCardM${x.machineID}ID${x.productionKey}`,
      attachedJobKey: x.attachedJobKey,
      lastKey: x.lastKey,
      machineID: x.machineID,
      nextKey: x.nextKey,
      productionKey: x.productionKey,
      content: jobName
    };
  });
};
My onDragEnd:
const onDragEnd = (result) => {
  if (!result.destination) {
    // dropped outside the list
    return;
  }
  const items = reorder(
    state.items,
    result.source.index,
    result.destination.index,
  );
  dispatch(sendAdjustments(items));
  // sends update to server
  // server updates mysql
  // server sends back update events from mysql in packets
  // props sent to DnD component are updated
}
So the actual bug looks like the graphics glitching: as things get temporarily filtered out in the SortLinkedList function, the divs jump around. Is there a smoother way to handle this client -> server -> DB -> server -> client dataflow that results in consistent handling in DnD?
UPDATE:
Still trying to solve this. I currently have a lock pattern implemented:
useEffect(() => {
  if (productionLock) {
    setState({
      items: SortLinkedList(getItems(data, jobList)),
      droppables: [{ id: "Original: not Dynamic" }]
    });
    setLoading(false);
  } else {
    console.log("locking first");
    setLoading(true);
  }
}, [productionLock]);
where productionLock is set to true and false by triggers from the server.
Basically: the app sends the data to the server, the server processes the request and sends new data back, and when it's finished the server sends the unlock signal.
That should make this update happen only once, but it does not; the component still re-renders on every state update the server pushes to the app.

What’s the code for sendAdjustments()?
You should update locally first; otherwise DnD pulls the item back to its original position while you wait for the backend to finish, which is what makes it appear glitchy. E.g.:
1. Set the newly reordered list locally as your state.
2. Send the network request.
3. If it fails, revert the local list state back to the original list.
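A minimal sketch of those three steps inside onDragEnd (assuming sendAdjustments returns a promise, e.g. via redux-thunk; adjust the rollback to however your dispatch reports failure):

const onDragEnd = (result) => {
  if (!result.destination) {
    return; // dropped outside the list
  }
  const previousItems = state.items; // keep the old order so we can roll back
  const items = reorder(state.items, result.source.index, result.destination.index);
  setState({ ...state, items });     // 1. optimistic: show the new order immediately
  dispatch(sendAdjustments(items))   // 2. then persist it on the server
    .catch(() => {
      setState({ ...state, items: previousItems }); // 3. revert if the server rejects the change
    });
};

This way react-beautiful-dnd never has to wait for the round trip, and the server's echoed updates only matter when they differ from what is already on screen.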

Related

Correct way to organise this process in Node

I need some advice on how to structure this function, as at the moment it is not happening in the correct order due to Node being asynchronous.
This is the flow I want to achieve; I don't need help with the code itself, but with the order in which to do things to achieve the end result, plus any suggestions on how to make it efficient:
1. Node routes a GET request to my controller.
2. The controller reads a .csv file on the local system and opens a read stream using the fs module.
3. Then use the csv-parse module to convert that to an array line by line (many 100,000s of lines).
4. Start a try/catch block.
5. With the current row from the CSV, take a value and try to find it in a MongoDB collection.
6. If found, take the ID and store the line from the CSV along with this ID as a foreign ID in a separate database.
7. If not found, create an entry in the DB, take the new ID, and then do step 6.
8. Print to the terminal the row number being worked on (ideally, at some point I would like to send this value to the page and have it update like a progress bar as the rows are completed).
Here is a small part of the code structure that I am currently using;
const fs = require('fs');
const parse = require('csv-parse');

function addDataOne(req, id) {
  const modelOneInstance = new InstanceOne({ ...code });
  const resultOne = modelOneInstance.save();
  return resultOne;
}

function addDataTwo(req, id) {
  const modelTwoInstance = new InstanceTwo({ ...code });
  const resultTwo = modelTwoInstance.save();
  return resultTwo;
}

exports.add_data = (req, res) => {
  const fileSys = 'public/data/';
  const parsedData = [];
  let i = 0;
  fs.createReadStream(`${fileSys}${req.query.file}`)
    .pipe(parse({}))
    .on('data', (dataRow) => {
      let RowObj = {
        one: dataRow[0],
        two: dataRow[1],
        three: dataRow[2],
        etc,
        etc
      };
      try {
        ModelOne.find(
          { propertyone: RowObj.one, propertytwo: RowObj.two },
          '_id, foreign_id'
        ).exec((err, searchProp) => {
          if (err) {
            console.log(err);
          } else {
            if (searchProp.length > 1) {
              console.log('too many returned from find function');
            }
            if (searchProp.length === 1) {
              addDataOne(RowObj, searchProp[0]).then((result) => {
                searchProp[0].foreign_id.push(result._id);
                searchProp[0].save();
              });
            }
            if (searchProp.length === 0) {
              let resultAddProp = null;
              addDataTwo(RowObj).then((result) => {
                resultAddProp = result;
                addDataOne(req, resultAddProp._id).then((result) => {
                  resultAddProp.foreign_id.push(result._id);
                  resultAddProp.save();
                });
              });
            }
          }
        });
      } catch (error) {
        console.log(error);
      }
      i++;
      let iString = i.toString();
      process.stdout.clearLine();
      process.stdout.cursorTo(0);
      process.stdout.write(iString);
    })
    .on('end', () => {
      res.send('added');
    });
};
I have tried to make the functions use async/await, but it seems to conflict with the fs.createReadStream or csv-parse functionality, probably due to my inexperience and incorrect use of the code...
I appreciate that this is a long question about the fundamentals of the code, but some tips/advice/pointers on how to get this going would be appreciated. I had it working when the data was sent one record at a time via a POST request from Postman, but I can't implement the next stage, which is to read from the CSV file that contains many records.
First of all, you can combine the following two checks into one query:
if (searchProp.length === 1) {
if (searchProp.length === 0) {
Use the upsert option in MongoDB's findOneAndUpdate query to update or insert in a single call.
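For example (a sketch only; the model and field names are taken from the question, and the $setOnInsert payload stands in for whatever addDataTwo currently inserts):

ModelOne.findOneAndUpdate(
  { propertyone: RowObj.one, propertytwo: RowObj.two },                    // same filter as the original find
  { $setOnInsert: { propertyone: RowObj.one, propertytwo: RowObj.two } },  // only applied when a new doc is created
  { upsert: true, new: true }                                              // insert if missing, return the doc either way
).exec((err, doc) => {
  if (err) return console.log(err);
  // doc is either the existing document or the newly created one,
  // so both of the original branches collapse into this single path
  addDataOne(RowObj, doc._id).then((result) => {
    doc.foreign_id.push(result._id);
    doc.save();
  });
});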
Secondly, don't do this in the main thread. Use a queue mechanism; it will be much more efficient.
The queue I personally use is Bull:
https://github.com/OptimalBits/bull#basic-usage
This also provides the progress-reporting functionality you need.
Also, regarding using async/await with a ReadStream, a lot of examples can be found on the net, such as: https://humanwhocodes.com/snippets/2019/05/nodejs-read-stream-promise/
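For the stream side, here is a hedged sketch of the same loop written with for await...of (Node streams, including the csv-parse transform stream, are async iterables; processRow is a hypothetical function holding your find/upsert logic and returning a promise):

const fs = require('fs');
const parse = require('csv-parse');

exports.add_data = async (req, res) => {
  const parser = fs.createReadStream(`public/data/${req.query.file}`).pipe(parse({}));
  let i = 0;
  // each iteration waits for processRow to finish before the next row is read,
  // so rows are handled strictly in order and back-pressure is applied automatically
  for await (const dataRow of parser) {
    await processRow(dataRow);
    i++;
    process.stdout.write(`\r${i}`);
  }
  res.send('added');
};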

How to execute a batch of transactions independently using pg-promise?

We're having an issue in our main data synchronization back-end function. Our client's mobile device pushes changes daily; however, last week they warned us that some changes weren't updated in the main web app.
After some investigation in the logs, we found that there is indeed a single transaction that fails and rolls back. However, it appears that all the transactions before this one also roll back.
The code works this way: the data to synchronize is an array of "changesets", and each changeset can update multiple tables at once. It's important that a changeset be updated completely or not at all, so each is wrapped in a transaction. Then each transaction is executed one after the other. If a transaction fails, the others shouldn't be affected.
I suspect that all the transactions are actually combined somehow, possibly through the main db.task. Instead of just looping to execute the transactions, we're using a db.task to execute them in a batch and avoid update conflicts on the same tables.
Any advice on how we could execute these transactions in a batch and avoid this rollback issue?
Thanks. Here's a snippet of the synchronization code:
// Begin task that will execute transactions one after the other
db.task(task => {
  const transactions = [];
  // Create a transaction for each changeset (propriete/fosse/inspection)
  Object.values(data).forEach((change, index) => {
    const logchange = { tx: index };
    const c = { ...change }; // Use a clone of the original change object
    transactions.push(
      task.tx(t => {
        const queries = [];
        // Propriete
        if (Object.keys(c.propriete.params).length) {
          const params = proprietes.parse(c.propriete.params);
          const propriete = Object.assign({ idpropriete: c.propriete.id }, params);
          logchange.propriete = { idpropriete: propriete.idpropriete };
          queries.push(t.one(`SELECT ${Object.keys(params).join()} FROM propriete WHERE idpropriete = $1`, propriete.idpropriete).then(previous => {
            logchange.propriete.previous = previous;
            return t.result('UPDATE propriete SET' + qutil.setequal(params) + 'WHERE idpropriete = ${idpropriete}', propriete).then(result => {
              logchange.propriete.new = params;
            })
          }));
        }
        else delete c.propriete;
        // Fosse
        if (Object.keys(c.fosse.params).length) {
          const params = fosses.parse(c.fosse.params);
          const fosse = Object.assign({ idfosse: c.fosse.id }, params);
          logchange.fosse = { idfosse: fosse.idfosse };
          queries.push(t.one(`SELECT ${Object.keys(params).join()} FROM fosse WHERE idfosse = $1`, fosse.idfosse).then(previous => {
            logchange.fosse.previous = previous;
            return t.result('UPDATE fosse SET' + qutil.setequal(params) + 'WHERE idfosse = ${idfosse}', fosse).then(result => {
              logchange.fosse.new = params;
            })
          }));
        }
        else delete c.fosse;
        // Inspection (rendezvous)
        if (Object.keys(c.inspection.params).length) {
          const params = rendezvous.parse(c.inspection.params);
          const inspection = Object.assign({ idvisite: c.inspection.id }, params);
          logchange.rendezvous = { idvisite: inspection.idvisite };
          queries.push(t.one(`SELECT ${Object.keys(params).join()} FROM rendezvous WHERE idvisite = $1`, inspection.idvisite).then(previous => {
            logchange.rendezvous.previous = previous;
            return t.result('UPDATE rendezvous SET' + qutil.setequal(params) + 'WHERE idvisite = ${idvisite}', inspection).then(result => {
              logchange.rendezvous.new = params;
            })
          }));
        }
        else delete change.inspection;
        // Cheminees
        c.cheminees = Object.values(c.cheminees).filter(cheminee => Object.keys(cheminee.params).length);
        if (c.cheminees.length) {
          logchange.cheminees = [];
          c.cheminees.forEach(cheminee => {
            const params = cheminees.parse(cheminee.params);
            const ch = Object.assign({ idcheminee: cheminee.id }, params);
            const logcheminee = { idcheminee: ch.idcheminee };
            queries.push(t.one(`SELECT ${Object.keys(params).join()} FROM cheminee WHERE idcheminee = $1`, ch.idcheminee).then(previous => {
              logcheminee.previous = previous;
              return t.result('UPDATE cheminee SET' + qutil.setequal(params) + 'WHERE idcheminee = ${idcheminee}', ch).then(result => {
                logcheminee.new = params;
                logchange.cheminees.push(logcheminee);
              })
            }));
          });
        }
        else delete c.cheminees;
        // Lock from further changes on the mobile device
        // Note: this change will be sent back to the mobile in part 2 of the synchronization
        queries.push(t.result('UPDATE rendezvous SET timesync = now() WHERE idvisite = $1', [c.idvisite]));
        console.log(`transaction#${++transactionCount}`);
        return t.batch(queries).then(result => { // Transaction complete
          logdata.transactions.push(logchange);
        });
      })
      .catch(function (err) { // Transaction failed for this changeset, rollback
        logdata.errors.push({ error: err, change: change }); // Provide error message and original change object to mobile device
        console.error(JSON.stringify(logdata.errors));
      })
    );
  });
  console.log(`Total transactions: ${transactions.length}`);
  return task.batch(transactions).then(result => { // All transactions complete
    // Log everything that was uploaded from the mobile device
    log.log(res, JSON.stringify(logdata));
  });
});
I apologize, but it is almost impossible to give a good final answer when the question is wrong on so many levels...
It's important that a change set be updated completely or not at all, so each is wrapped in a transaction.
If the change set requires data integrity, the whole thing must be one transaction, and not a set of transactions.
Then each transaction is executed one after the other. If a transaction fails, the others shouldn't be affected.
Again, data integrity is what a single transaction guarantees, you need to make it into one transaction, not multiple.
I suspect that all the transactions are actually combined somehow, possibly through the main db.task.
They are combined, not through the task, but through the tx method.
Any advice how we could execute these transactions in batch and avoid this rollback issue?
By joining them into a single transaction.
You would use a single tx call at the top, and that's it, no tasks needed there. And in case the code underneath makes use of its own transactions, you can update it to allow conditional transactions.
Also, when building complex transactions, an app benefits a lot from using the repository patterns shown in pg-promise-demo. You can have methods inside repositories that support conditional transactions.
And you should redo your code to avoid horrible things it does, like manual query formatting. For example, never use things like SELECT ${Object.keys(params).join()}, that's a recipe for disaster. Use the proper query formatting that pg-promise gives you, like SQL Names in this case.
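For illustration only, a rough sketch (showing just the propriete part from the question, not the whole sync) of what a single transaction with pg-promise's own formatting could look like; $1:name expands an array of column names into escaped SQL Names, and pgp.helpers.update generates the SET clause:

db.tx(t => {
  const queries = [];
  Object.values(data).forEach(change => {
    if (Object.keys(change.propriete.params).length) {
      const params = proprietes.parse(change.propriete.params);
      // read the previous values with properly escaped column names
      queries.push(t.one('SELECT $1:name FROM propriete WHERE idpropriete = $2',
        [Object.keys(params), change.propriete.id]));
      // let pg-promise build the SET clause instead of concatenating strings
      queries.push(t.none(pgp.helpers.update(params, null, 'propriete') +
        ' WHERE idpropriete = $1', [change.propriete.id]));
    }
    // ...same idea for fosse, rendezvous and cheminee...
  });
  return t.batch(queries);
})
  .then(() => { /* everything committed together */ })
  .catch(err => { /* everything rolled back together */ });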

Any way to reduce the amount of concurrent requests to fetch data and cache in nodejs?

I have an Express app which requires a very low response time, ~<200ms. Right now we can only just hit that number, but that's a separate topic.
We're planning to fetch a piece of data from the database: if it's found in Redis, return it; if not, fire the DB request and save the result to Redis so the next requests can get it from there.
I'm running some tests and was wondering if there's a way to reduce the number of database fetch requests.
For example, our application currently handles 300 req/s per box, and we have six boxes running on AWS. The first time a piece of data is not available in Redis, there might be around ~500 requests trying to fetch the data from the DB and cache it in Redis. We're trying to bring that number down. I'm not sure if there's a way in Node.js or Redis to handle that.
Here's the code that I'm testing.
client.getAsync('key').then(function (data) {
  if (data) {
    console.log(data); // Return this data if found
    res.send(data);
  } else {
    // I'm trying to reduce the number of calls for concurrent requests in this block.
    console.log('not found');
    var dataFromDb = // fetch data from DB
    client.set('key', dataFromDb); // Fire and forget
    res.send('not found'); // Return not found right away
  }
});
And I test the call using ab:
ab -n 20 -c 10 http://localhost:8081/redis
These are the results I got:
not found
not found
not found
not found
not found
not found
something
not found
something
something
something
something
something
something
something
something
something
something
In this example, there are 7 requests fetching the same data from the database and saving it to Redis.
My question is: is there any way I can reduce that number? Fetching from the DB is quite slow at the moment, ~900ms (we're trying to optimize that).
Yes, there is. I did the same thing. I will describe only the logic here. Your fetchCache method should return a promise. You also keep an array of { cacheKey, promise } entries. Each time you send a request, you add the key to this array. The next time you need to fetch the cache, you check the array first; if the key is there, you grab that promise, otherwise you call fetchCache.
Here is my code. It works, but it is probably hard to read; it should give you a basic understanding.
class DictTranslatableRepo {
  constructor(model) {
    var self = this;
    self.title = model + "s Repo";
    self.model = models[model];
    self.running = {};
    self.curItems = {};
  }

  *start() {
    var self = this;
    var curItems = yield self.model.findAll();
    _.forEach(curItems, function(row) {
      self.curItems[row.key] = row.value;
    });
  }

  *map(from) {
    var self = this;
    if (from == "") return "";
    if (!_.isUndefined(self.curItems[from])) return self.curItems[from];
    if (_.isUndefined(self.running[from])) {
      self.running[from] = [];
      return new Promise(function(resolve, reject) {
        self.running[from].push(resolve);
        self.job(from, function(err, to) { // Main job
          var callbackArr = self.running[from];
          delete self.running[from];
          _.forEach(callbackArr, function(callback) {
            callback(to);
          });
        });
      });
    } else {
      return new Promise(function(resolve, reject) {
        self.running[from].push(resolve);
      });
    }
  }

  job(from, callback) {
    var self = this;
    var to = "as shown";
    co(function*() {
      try {
        to = yield translator.translate(from);
        yield self.model.add({ key: from, value: to });
        self.curItems[from] = to;
        callback(null, to);
      } catch (err) {
        callback(err);
        //logger.error("Cant translate entity: " + from);
      }
    }).catch(function(err) {
      // Unhandled Error
      callback(new Error(err));
    });
  }
}
My map method is your fetchCache method.
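The same idea without generators, as a minimal sketch (it reuses client.getAsync from the question and assumes a hypothetical fetchFromDb(key) that returns a promise): keep one in-flight promise per key and let every concurrent request for that key await it.

const inFlight = new Map(); // key -> promise of the pending DB fetch

function getCached(key) {
  return client.getAsync(key).then(cached => {
    if (cached) return cached;                       // cache hit
    if (inFlight.has(key)) return inFlight.get(key); // someone is already fetching this key
    const p = fetchFromDb(key)
      .then(data => {
        client.set(key, data);                       // fire and forget, as in the question
        return data;
      })
      .finally(() => inFlight.delete(key));          // allow future refreshes
    inFlight.set(key, p);
    return p;
  });
}

Only the first cache miss for a key hits the database; every other concurrent request gets the same promise and resolves when it does.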

Passing a return from one function to another function that already has set parameters?

Edit: I know JS is asynchronous, and I have looked over the How to Return thread. The issue I'm having is that, going from "foo" examples to something specific, I'm not quite sure where to restructure this.
Also, here is some context: https://github.com/sharkwheels/beanballs/blob/master/bean-to-osc-two.js
I have a question about returns in Node. It might be a dumb question, but here goes. I have a function that connects to a socket and gets OSC messages from Processing:
var sock = dgram.createSocket("udp4", function(msg, rinfo) {
  try {
    // get at all that info being sent out from Processing.
    //console.log(osc.fromBuffer(msg));
    var getMsg = osc.fromBuffer(msg);
    var isMsg = getMsg.args[0].value;
    var isName = getMsg.args[1].value;
    var isAdd = getMsg.address;
    var isType = getMsg.oscType;
    // make an array out of it
    var isAll = [];
    isAll.push(isName);
    isAll.push(isMsg);
    isAll.push(isAdd);
    isAll.push(isType);
    // return the array
    console.log(isAll);
    return isAll;
  } catch (error) {
    console.log(error);
  }
});
Below I have the start of another function, to write some of that array to a BLE device. It needs name and characteristics from a different function. How do I get the below function to use isAll AND two existing parameters?
var writeToChars = function (name, characteristics) { // this is passing values from the BLE setup function
  // i need to get isAll to here.
  // eventually this will write some values from isAll into a scratch bank.
}
Thanks.
The async call in this case can be written something like this. State can be maintained in closure variables if required; in this particular case you can do without any state (isAll) as well.
var isAll;
var soc = dgram.createSocket('udp4', oncreatesocket);

function oncreatesocket(msg, rinfo) {
  isAll = parseMessage(msg);
  writeData(isAll);
}

function parseMessage(msg) {
  ...
  // code to parse msg and return isAll
}

function writeData() {}
If writeData is a small enough function, it can live inside oncreatesocket without impacting the readability of the code.
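If writeToChars also needs its name and characteristics parameters from the BLE setup, one option (just a sketch; onBleReady is a made-up hook standing in for wherever those values become available) is to create the writer inside the BLE setup so the two values are captured in a closure, and then hand only isAll to it from the socket callback:

var writeToChars; // assigned once the BLE setup has run

function onBleReady(name, characteristics) {
  writeToChars = function (isAll) {
    // name, characteristics and isAll are all visible here;
    // write the values from isAll into the scratch bank
  };
}

var sock = dgram.createSocket("udp4", function (msg, rinfo) {
  var isAll = parseMessage(msg);          // parse as shown above
  if (writeToChars) writeToChars(isAll);  // skip until the BLE side is ready
});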
Alright. So I figured out what to do, at least in this scenario. I'm sure there is a better way to do this, but for now, this works.
I'm mapping an existing global array of peripherals into the write function, while passing the OSC message to it as a parameter. This solved my issue of "how do I get two pieces of information to the same place". It figures out which peripheral is which and writes a different value to each scratch bank of each peripheral accordingly. Leaving here for future reference.
var writeToBean = function(passThrough) {
  var passThrough = passThrough;
  console.log("in Write to bean: ", passThrough);
  _.map(beanArray, function(n) {
    if (n.advertisement.localName === passThrough.name) {
      //var name = n.advertisement.localName;
      n.discoverSomeServicesAndCharacteristics(['a495ff20c5b14b44b5121370f02d74de'], [scratchThr], function(error, services, characteristics) {
        var service = services[0];
        var characteristic = characteristics[0];
        var toSend = passThrough.msg;
        console.log("service", service);
        console.log("characteristic", characteristic);
        if (toSend != null) {
          characteristic.write(new Buffer([toSend]), false, function(error) {
            if (error) { console.log(error); }
            console.log("wrote " + toSend + " to scratch bank 3");
          });
        }
        // not sure how to make the program resume, it stops here. No error, just stops processing.
      });
    }
  });
}

Persist data on disk using chrome extension API

I am trying to save some data which should be available even after the browser is restarted, so this data should persist. I am using the Chrome Storage Sync API for this. But when I restart my browser, I get an empty object from chrome.storage.sync.get.
Here is my sample code:
SW.methods.saveTaskListStore = function() {
  chrome.storage.sync.set({
    'taskListStore': SW.stores.taskListStore
  }, function() {
    if (SW.callbacks.watchProcessSuccessCallback) {
      SW.callbacks.watchProcessSuccessCallback(SW.messages.INFO_DATA_SAVED);
      SW.callbacks.watchProcessSuccessCallback = null;
    }
  });
};

SW.methods.loadTaskListStore = function() {
  SW.stores.loadTaskListStore = [];
  chrome.storage.sync.get('taskListStore', function(taskFeed) {
    var tasks = taskFeed.tasks;
    if (tasks && !tasks.length) {
      SW.stores.loadTaskListStore = tasks;
    }
  });
};
I guess I am using the Wrong API.
If this is not some copy-paste error, you are storing under key taskListStore and trying to get data under key loadTaskListStore.
Besides that, according to the documentation on StorageArea.get(), the result object is an object with items in their key-value mappings. Thus, in your case, you should do:
chrome.storage.sync.get("taskListStore", function(items) {
if (items.taskListStore) {
var tasks = items.taskListStore.tasks;
...
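Putting the key fix together with the length check (a sketch; it assumes the value saved under taskListStore is the task array itself):

SW.methods.loadTaskListStore = function() {
  SW.stores.taskListStore = [];
  chrome.storage.sync.get('taskListStore', function(items) {
    var tasks = items.taskListStore; // same key that was used with chrome.storage.sync.set
    if (tasks && tasks.length) {     // note: length, not !length
      SW.stores.taskListStore = tasks;
    }
  });
};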
