Handle smart contract transaction with Express (Node.js)

I have been working on a small project, but I'm facing a problem.
Here is my code:
app.get('/thu', (req, res) => {
  thu(function(err, output) {
    if (err) {
      res.json({"err": "" + err, "output": output});
      return;
    }
    res.send("ket qua: " + output);
  });
});
var thu = function(callback) {
  web3.eth.getTransactionCount(senderAddress).then((txnCount) => {
    console.log("goi thu");
    var method = contract.methods.thu();
    var encodedABI = method.encodeABI();
    var thuTx = {
      from: senderAddress,
      to: contractAddress,
      nonce: web3.utils.toHex(txnCount),
      gasLimit: web3.utils.toHex(GAS_LIMIT),
      gasPrice: web3.utils.toHex(GAS_PRICE),
      data: encodedABI,
    };
    sendTxn(thuTx, callback);
  }).catch((err) => {
    console.log("web3 err", err);
    callback(err, null);
  });
};
function sendTxn(rawTx, callback) {
  var privateKeyBuffer = new Buffer(privateKey, 'hex');
  var transaction = new tx(rawTx);
  transaction.sign(privateKeyBuffer);
  var serializedTx = transaction.serialize().toString('hex');
  web3.eth.sendSignedTransaction(
    '0x' + serializedTx, function(err, txnHash) {
      if (err) {
        console.log("txn err", err);
        callback(err, null);
      } else {
        console.log("txn result", txnHash);
      }
    }).catch((err) => {
      callback(err, null);
    });
}
I'm sure my smart contract runs OK: when I hit submit, the code sends a transaction to Rinkeby and it goes through, but I never receive a response.
Please help me solve this problem. Thank you.

sendSignedTransaction returns a promise combined with an event emitter. As the web3 docs put it:
Ethereum as a blockchain has different levels of finality and
therefore needs to return multiple "stages" of an action. To cope with
this requirement we return a "promiEvent" for functions like
web3.eth.sendTransaction or contract methods. This "promiEvent" is a
promise combined with an event emitter to allow acting on different
stages of an action on the blockchain, like a transaction.
You can place a console.log on every event to see what is happening, or whether you're getting an error:
web3.eth.sendSignedTransaction('0x' + serializedTx)
  .once('transactionHash', hash => console.log(`Hash: ${hash}`))
  .once('receipt', receipt => console.log(`Receipt: ${receipt}`))
  .on('confirmation', (confNumber, receipt) => console.log(confNumber))
  .on('error', error => console.error(error))
  .then(receipt => {
    // fired once the transaction is mined and the receipt is available
  });

Problem solved: I forgot to put the callback(...) in the else {...} branch, so the request never got a response on success.
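For anyone following along, here is a minimal sketch of the corrected sendTxn with that callback added (same names as above; the deprecated new Buffer is also swapped for Buffer.from):
function sendTxn(rawTx, callback) {
  var privateKeyBuffer = Buffer.from(privateKey, 'hex');
  var transaction = new tx(rawTx);
  transaction.sign(privateKeyBuffer);
  var serializedTx = transaction.serialize().toString('hex');
  web3.eth.sendSignedTransaction('0x' + serializedTx, function(err, txnHash) {
    if (err) {
      console.log("txn err", err);
      callback(err, null);
    } else {
      console.log("txn result", txnHash);
      callback(null, txnHash); // the missing callback: report success back to the route
    }
  }).catch((err) => {
    callback(err, null);
  });
}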


TypeError: validator.escape is not a function - (express-validator#6.12.1 package)

Codecademy video: link
Explanation:
As part of my Codecademy Back-End Engineer training, I have to do a project outside of their platform. The goal of this project is to make sure a node application is protected from common web attacks.
One challenge I faced was securing the code from Cross-Site Scripting (XSS) attacks. To do this, I used the express-validator#6.12.1 package. The code uses a function called validator.escape, which is supposed to protect against malicious code being inserted into an input form. However, I get an error in the console when I try to use it.
Terminal output :
TypeError: validator.escape is not a function
Here is the code :
const validator = require("express-validator");

app.post("/public_forum", function (request, response) {
  if (request.session.loggedin) {
    var comment = validator.escape(request.body.comment);
    var username = request.session.username;
    if (comment) {
      db.all(
        `INSERT INTO public_forum (username,message) VALUES ('${username}','${comment}')`,
        (err, rows) => {
          console.log(err);
        }
      );
      db.all(`SELECT username,message FROM public_forum`, (err, rows) => {
        console.log(rows);
        console.log(err);
        response.render("forum", { rows });
      });
    } else {
      db.all(`SELECT username,message FROM public_forum`, (err, rows) => {
        console.log(rows);
        console.log(err);
        response.render("forum", { rows });
      });
    }
    comment = "";
  } else {
    response.redirect("/");
  }
  comment = "";
  //response.end();
});
In the Codecademy video, the instructor uses this function.
express-validator v6 does not export escape at the top level (it comes from the underlying validator package), which is why validator.escape is not a function; use the check API instead. Try with:
const { check, validationResult } = require('express-validator');

app.post('/public_forum', async function (request, response) {
  if (request.session.loggedin) {
    await check('comment').trim().escape().run(request);
    const result = validationResult(request);
    if (result.isEmpty()) {
      // Good to go...
      const { comment } = request.body;
    }
    ...
Link to official docs
I implemented your code and tried adding both a malicious and a safe comment, but I got an error in my browser saying "Port 4000 Not Found"; every time I run the code, it kills the port. So, based on what you sent me, I implemented the following code, which works well.
// This code defines a post request handler for the "/public_forum" endpoint.
app.post('/public_forum', async function (request, response) {
  // Check if the user is logged in by checking the session data.
  if (request.session.loggedin) {
    // Trim and escape the incoming comment.
    await check('comment').trim().escape().run(request);
    // Get the validation result of the incoming comment.
    const errors = validationResult(request);
    // If the validation result contains errors, return a 400 status with the errors in JSON format.
    if (!errors.isEmpty()) {
      return response.status(400).json({ errors: errors.array() });
    }
    // Get the comment from the request body.
    const { comment } = request.body;
    // If a valid comment exists, insert it into the "public_forum" database table.
    if (comment) {
      db.run(
        `INSERT INTO public_forum (username,message) VALUES (?,?)`, [request.session.username, comment],
        (err) => {
          // If an error occurs while inserting the comment, log the error.
          if (err) {
            console.error(err);
          }
        }
      );
    }
    // Select all the rows from the "public_forum" table.
    db.all(`SELECT username,message FROM public_forum`, (err, rows) => {
      // If an error occurs while selecting the rows, log the error.
      if (err) {
        console.error(err);
      }
      // Log the selected rows.
      console.log(rows);
      // Render the "forum" template, passing in the selected rows as a parameter.
      response.render("forum", { rows });
    });
  } else {
    // If the user is not logged in, redirect them to the homepage.
    response.redirect("/");
  }
});
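Note that, besides escaping the comment, the rewritten insert uses a parameterized query (VALUES (?,?) with a bind array) instead of interpolating username and comment into the SQL string, which also closes the SQL injection hole in the original snippet.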

Sequelize transaction always returning null

I'm writing the backend for creating audit protocols. The user should be able to create criteria for the audit protocol. For this, I have the following backend method, to make sure the protocol is either created completely or the creation process is cancelled. It is possible to set several kinds of forms / criteria, but it could be that only one kind of form is required; I check for that with the if statements.
The creation works as expected, but the REST API always returns null to the clients, so I can't do further processing on the frontend based on the result of the creation process.
Technologies: Node.js and Sequelize. Frontend in Angular / Ionic. Database in MySQL.
I tried around with some transaction passing and return statements, and compared it to a similar code snippet which works as expected.
exports.setAudit = (req, res, next) => {
  trueFalseCriteria = req.body.trueFalseForms;
  isShouldCriteria = req.body.isShouldForms;
  generalCriteria = req.body.generalForms;
  measurementCriteria = req.body.measurementForms;
  toolId = req.body.toolId;
  // A transaction is used to roll everything back if something goes wrong
  return sequelize
    .transaction(t => {
      return audit
        .create(
          {
            // Creating an audit referencing the tool
            toolId: toolId
          },
          { transaction: t }
        )
        .then(
          // Getting the id of the audit that we just created
          audit => {
            return audit.id;
          },
          { transaction: t }
        )
        .then(auditId => {
          // Check whether the kind of form is used or not. If so, sequelize tries to do a bulk insert into the database.
          // Each bulk insert throws an error if it fails, to cancel the whole transaction
          if (trueFalseCriteria) {
            console.log(1);
            trueFalseCriteria.forEach(dataEl => {
              dataEl.auditId = auditId;
            });
            trueFalseCriterion.bulkCreate(trueFalseCriteria).catch(err => {
              // Throw error to cancel transaction
              throw new Error(err);
            });
          }
          if (isShouldCriteria) {
            console.log(2);
            isShouldCriteria.forEach(dataEl => {
              dataEl.auditId = auditId;
            });
            isShouldCriterion.bulkCreate(isShouldCriteria).catch(err => {
              // Throw error to cancel transaction
              throw new Error(err);
            });
          }
          if (generalCriteria) {
            console.log(3);
            generalCriteria.forEach(dataEl => {
              dataEl.auditId = auditId;
            });
            generalCriterion.bulkCreate(generalCriteria).catch(err => {
              // Throw error to cancel transaction
              throw new Error(err);
            });
          }
          if (measurementCriteria) {
            console.log(4);
            measurementCriteria.forEach(dataEl => {
              dataEl.auditId = auditId;
            });
            measurementCriterion.bulkCreate(measurementCriteria).catch(err => {
              // Throw error to cancel transaction
              throw new Error(err);
            });
          }
        }, { transaction: t });
    })
    .then(data => {
      console.log(5);
      res.status(200).json(data);
    })
    .catch(err => {
      console.log(6);
      if (!err.statusCode) {
        err.statusCode = 500;
      }
      next(err);
    });
};
Expected result: HTTP response with status code 200 on success
Actual result: null
I think you are missing a return in the last .then():
.then(auditId => {
  // Check whether the kind of form is used or not. If so, sequelize tries to do a bulk insert into the database.
  .....
  if (measurementCriteria) {
    ....
  }
  // RETURN SOMETHING HERE
}, { transaction: t });
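As a hedged sketch (assuming the four models above), that last .then() could collect the bulk inserts and return them. This also fixes two related problems in the original: the bulkCreate calls are never awaited, and they don't receive { transaction: t }, so they run outside the transaction and their rejections go unhandled:
.then(auditId => {
  const inserts = [];
  if (trueFalseCriteria) {
    trueFalseCriteria.forEach(dataEl => {
      dataEl.auditId = auditId;
    });
    inserts.push(trueFalseCriterion.bulkCreate(trueFalseCriteria, { transaction: t }));
  }
  // ...repeat the same pattern for the other three kinds of forms...
  // Resolve to the new audit id so the final .then() has data to send back
  return Promise.all(inserts).then(() => auditId);
});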

How to 'pipe' oracle-db data from 'on data' event

I've been using node-oracledb for a few months and have managed to achieve what I've needed so far.
I'm currently working on a search app that could potentially return about 2m rows of data from a single call. To avoid a disconnect between the browser and the server, I thought I would try queryStream so that there is a constant flow of data back to the client.
I implemented the queryStream example as-is, and this worked fine for a few hundred thousand rows. However, when more than about a million rows are returned, Node runs out of memory. By logging and watching both client and server log events, I can see that the client is way behind the server in terms of rows sent and received, so it looks like Node is falling over because it's buffering so much data.
It's worth noting that at this point, my queryStream implementation is within a req/res function called via Express.
To return the data, I do something like....
stream.on('data', function (data) {
  rowcount++;
  let obj = new myObjectConstructor(data);
  res.write(JSON.stringify(obj.getJson()));
});
I've been reading about how streams and pipe can help with flow, so what I'd like to do is pipe the results from the query to a) help with flow and b) be able to pipe the results to other functions before sending them back to the client.
E.g.
function getData(req, res) {
  var stream = myQueryStream(connection, query);
  stream
    .pipe(toSomeOtherFunction)
    .pipe(yetAnotherFunction)
    .pipe(res);
}
I've spent a few hours trying to find a solution or example that allows me to pipe results, but I'm stuck and need some help.
Apologies if I'm missing something obvious, but I'm still getting to grips with Node and especially streams.
Thanks in advance.
There's a bit of an impedance mismatch here. The queryStream API emits rows of JavaScript objects, but what you want to stream to the client is a JSON array. You basically have to add an open bracket to the beginning, a comma after each row, and a close bracket to the end.
I'll show you how to do this in a controller that uses the driver directly as you have done, instead of using separate database modules as I advocate in this series.
const oracledb = require('oracledb');

async function get(req, res, next) {
  try {
    const conn = await oracledb.getConnection();
    const stream = await conn.queryStream('select * from employees', [], {outFormat: oracledb.OBJECT});

    res.writeHead(200, {'Content-Type': 'application/json'});
    res.write('[');

    let firstRow = true;

    stream.on('data', (row) => {
      if (firstRow) {
        firstRow = false;
      } else {
        res.write(','); // separator before each row after the first, so there's no trailing comma before ']'
      }
      res.write(JSON.stringify(row));
    });

    stream.on('end', () => {
      res.end(']');
    });

    stream.on('close', async () => {
      try {
        await conn.close();
      } catch (err) {
        console.log(err);
      }
    });

    stream.on('error', async (err) => {
      next(err);

      try {
        await conn.close();
      } catch (err) {
        console.log(err);
      }
    });
  } catch (err) {
    next(err);
  }
}

module.exports.get = get;
Once you get the concepts, you can simplify things a bit with a reusable Transform class which allows you to use pipe in the controller logic:
const oracledb = require('oracledb');
const { Transform } = require('stream');

class ToJSONArray extends Transform {
  constructor() {
    super({objectMode: true});
    this.push('[');
  }

  _transform(row, encoding, callback) {
    if (this._prevRow) {
      this.push(JSON.stringify(this._prevRow));
      this.push(',');
    }
    this._prevRow = row;
    callback(null);
  }

  _flush(done) {
    if (this._prevRow) {
      this.push(JSON.stringify(this._prevRow));
    }
    this.push(']');
    delete this._prevRow;
    done();
  }
}

async function get(req, res, next) {
  try {
    const toJSONArray = new ToJSONArray();
    const conn = await oracledb.getConnection();
    const stream = await conn.queryStream('select * from employees', [], {outFormat: oracledb.OBJECT});

    res.writeHead(200, {'Content-Type': 'application/json'});

    stream.pipe(toJSONArray).pipe(res);

    stream.on('close', async () => {
      try {
        await conn.close();
      } catch (err) {
        console.log(err);
      }
    });

    stream.on('error', async (err) => {
      next(err);

      try {
        await conn.close();
      } catch (err) {
        console.log(err);
      }
    });
  } catch (err) {
    next(err);
  }
}

module.exports.get = get;
Rather than writing your own logic to create a JSON stream, you can use JSONStream to convert an object stream to (stringified) JSON before piping it to its destination (res, process.stdout, etc.). This saves the need to muck around with .on('data', ...) events.
In the example below, I've used pipeline from Node's stream module rather than the .pipe method: the effect is similar (with better error handling, I think). To get objects from oracledb.queryStream, you can specify the option {outFormat: oracledb.OUT_FORMAT_OBJECT} (docs). Then you can make arbitrary modifications to the stream of objects produced. This can be done using a transform stream, made perhaps using through2-map, or, if you need to drop or split rows, through2. Below, the stream is sent to process.stdout after being stringified as JSON, but you could equally send it to express's res.
require('dotenv').config() // config from .env file
const JSONStream = require('JSONStream')
const oracledb = require('oracledb')
const { pipeline } = require('stream')
const map = require('through2-map') // see https://www.npmjs.com/package/through2-map

oracledb.getConnection({
  user: process.env.DB_USER,
  password: process.env.DB_PASSWORD,
  connectString: process.env.CONNECT_STRING
}).then(connection => {
  pipeline(
    connection.queryStream(`
      select dual.*, 'test' as col1 from dual
      union select dual.*, :someboundvalue as col1 from dual
      `
      , {"someboundvalue": "test5"} // binds
      , {
        prefetchRows: 150, // for tuning
        fetchArraySize: 150, // for tuning
        outFormat: oracledb.OUT_FORMAT_OBJECT
      }
    )
    , map.obj((row, index) => {
      row.arbitraryModification = index
      return row
    })
    , JSONStream.stringify() // false gives ndjson
    , process.stdout // or send to express's res
    , (err) => { if (err) console.error(err) }
  )
})
// [
// {"DUMMY":"X","COL1":"test","arbitraryModification":0}
// ,
// {"DUMMY":"X","COL1":"test5","arbitraryModification":1}
// ]
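To send the same pipeline to Express instead, here is a minimal sketch (the /employees route, app object, and next-based error handling are illustrative assumptions, not part of the original):
app.get('/employees', async (req, res, next) => {
  try {
    const connection = await oracledb.getConnection()
    res.writeHead(200, { 'Content-Type': 'application/json' })
    pipeline(
      connection.queryStream('select * from employees', [], { outFormat: oracledb.OUT_FORMAT_OBJECT })
      , JSONStream.stringify()
      , res // express's response is a writable stream
      , async (err) => {
        // runs once the pipeline finishes or fails; release the connection either way
        try { await connection.close() } catch (e) { console.error(e) }
        if (err) next(err)
      }
    )
  } catch (err) {
    next(err)
  }
})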

nodejs pg transactions without nesting

I would like to know if it's possible to run a series of SQL statements and have them all committed in a single transaction.
The scenario I am looking at is where an array has a series of values that I wish to insert into a table, not individually but as a unit.
I was looking at the following item, which provides a framework for transactions in node using pg. The individual transactions appear to be nested within one another, so I am unsure how this would work with an array containing a variable number of elements.
https://github.com/brianc/node-postgres/wiki/Transactions
var pg = require('pg');

var rollback = function(client, done) {
  client.query('ROLLBACK', function(err) {
    //if there was a problem rolling back the query
    //something is seriously messed up. Return the error
    //to the done function to close & remove this client from
    //the pool. If you leave a client in the pool with an unaborted
    //transaction weird, hard to diagnose problems might happen.
    return done(err);
  });
};

pg.connect(function(err, client, done) {
  if (err) throw err;
  client.query('BEGIN', function(err) {
    if (err) return rollback(client, done);
    //as long as we do not call the `done` callback we can do
    //whatever we want...the client is ours until we call `done`
    //on the flip side, if you do call `done` before either COMMIT or ROLLBACK
    //what you are doing is returning a client back to the pool while it
    //is in the middle of a transaction.
    //Returning a client while it's in the middle of a transaction
    //will lead to weird & hard to diagnose errors.
    process.nextTick(function() {
      var text = 'UPDATE account SET money = money + $1 WHERE id = $2';
      client.query(text, [100, 1], function(err) {
        if (err) return rollback(client, done);
        client.query(text, [-100, 2], function(err) {
          if (err) return rollback(client, done);
          client.query('COMMIT', done);
        });
      });
    });
  });
});
My array logic is:
banking.forEach(function(batch) {
  client.query(text, [batch.amount, batch.id], function(err, result) {
    // ...
  });
});
pg-promise offers a very flexible support for transactions. See Transactions.
It also supports partial nested transactions, aka savepoints.
The library manages transactions automatically, which is what should be used these days, because too many things can go wrong if you try organizing a transaction manually, as in your example.
See a related question: Optional INSERT statement in a transaction
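As a hedged sketch (assuming a db object created via pg-promise, plus the banking array and text statement from the question), the variable-length case could look like:
const pgp = require('pg-promise')();
const db = pgp(process.env.DATABASE_URL); // hypothetical connection string

function saveBatches(banking, text) {
  // db.tx begins a transaction and commits only if the callback's promise resolves;
  // any rejection rolls the whole batch back.
  return db.tx(t => {
    const queries = banking.map(batch => t.none(text, [batch.amount, batch.id]));
    return t.batch(queries); // resolves when every statement has succeeded
  });
}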
Here's a simple TypeScript solution that avoids pg-promise:
import { PoolClient } from "pg"
import { pool } from "../database"

const tx = async (callback: (client: PoolClient) => Promise<void>) => {
  const client = await pool.connect();
  try {
    await client.query('BEGIN')
    try {
      await callback(client)
      await client.query('COMMIT')
    } catch (e) {
      await client.query('ROLLBACK')
      throw e // rethrow so callers can tell the transaction was rolled back
    }
  } finally {
    client.release()
  }
}

export { tx }
export { tx }
Usage:
...
let result;

await tx(async client => {
  const { rows } = await client.query<{ cnt: string }>('SELECT COUNT(*) AS cnt FROM users WHERE username = $1', [username]);
  result = parseInt(rows[0].cnt) > 0;
});

return result;
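For the original array scenario, the same helper might be used like this (assuming the banking array and text statement from the question):
await tx(async client => {
  for (const batch of banking) {
    // each insert runs on the same client, inside one BEGIN/COMMIT
    await client.query(text, [batch.amount, batch.id]);
  }
});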

Node Survey Monkey Polling

I am trying to poll respondents that have changed answers, or get new respondents, but this code appears to pull in all of the data regardless. Have I got the wrong data key? The polling guide suggests adding start_modified_date, but that doesn't work. Thanks in advance.
var SurveyMonkeyAPI = require('surveymonkey').SurveyMonkeyAPI;

try {
  var api = new SurveyMonkeyAPI(config.surveymonkey.key, config.surveymonkey.access_token, config.surveymonkey.params);
} catch (error) {
  console.log(error.message);
}

var data = {
  survey_id: surveyId,
  fields: ["date_created", "date_modified", "status"],
  start_modified_date: lastLoadedDate.toISOString().replace(/T/, ' ').replace(/\..+/, '')
};

console.log("FETCHING ", surveyId, data);

this.api.getRespondentList(data, this.bind(function (error, data) {
  if (error)
    console.log(error.message);
  else
    console.log(JSON.stringify(data)); // Do something with your data!
}));
Here is the fork that has it fixed: https://github.com/GeorgePhillips/node-surveymonkey
