Node.js: Querying SQLite with the `sqlite` package

I'm trying to get the hang of Node (I mainly use Python), so I'm working on a small project that reads and writes data to a SQLite database.
Luckily, I am having no issue writing to the database, but I cannot seem to get queries to work at all. I've tested the queries in the sql terminal and they succeed.
So far, I have something like this:
const fs = require("fs");
const util = require("util");
const sqlite = require("sqlite");
const Promise = require("bluebird");

// const DATABASE = ":memory:";
const DATABASE = "./database.sqlite";

function insertDataIntoDatabase(transactions, db) {
    // Write each transaction into the database.
    let sqlStatement = "INSERT INTO Trx \
        (name, address, amount, category) \
        VALUES ";
    for (var i = 0; i < transactions.length; ++i) {
        let trx = transactions[i];
        sqlStatement += util.format(
            "('%s', '%s', %d, '%s'), ",
            trx.name,
            trx.address,
            trx.amount,
            trx.category,
        );
    }
    sqlStatement = sqlStatement.substring(0, sqlStatement.length - 2);
    db.then(db => db.run(sqlStatement))
        .catch((err) => console.log(err));
}
function getTransactions(db, category) {
    // Return an array of valid transactions of a given category.
    let where = "";
    if (category) {
        where = util.format("WHERE category='%s'", category);
    }
    let sqlStatement = util.format("SELECT * from Trx %s", where);
    sqlStatement = "SELECT * from Trx"; // Trying to figure out what's happening
    console.log(sqlStatement);
    db.then(db => {
        db.all(sqlStatement)
            .then((err, rows) => {
                console.log(rows); // undefined
                console.log(err);  // []
            })
    })
}
// Set up the db connection
const db = sqlite.open(DATABASE, { cached: true })
    .then(db => db.migrate({ force: 'last' }));

// Read transactions and write them to the database
fs.readFile("transactions.json", "utf8", (err, data) => {
    let transactions = JSON.parse(data).transactions;
    insertDataIntoDatabase(transactions, db);
})

// Get transaction data
getTransactions(db, 'credit');

// Close connection to DB
db.then(db => db.close());

Looking at this again, I think the issue is the async nature of Node. The query itself was successful, but at that point in time I had not yet inserted the data from the JSON file into the database, hence the empty result.
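A straightforward fix is to make the ordering explicit with async/await, so the insert completes before the query runs. Here is a minimal sketch, assuming the same v2-style `sqlite` API used above; parameterized values also avoid the quoting problems of building the VALUES list with util.format:

const fs = require("fs");
const sqlite = require("sqlite");

const DATABASE = "./database.sqlite";

async function main() {
    // Open the connection and run migrations before touching the data.
    const db = await sqlite.open(DATABASE, { cached: true });
    await db.migrate({ force: 'last' });

    // Insert the transactions and wait for every insert to finish.
    const data = fs.readFileSync("transactions.json", "utf8");
    const transactions = JSON.parse(data).transactions;
    for (const trx of transactions) {
        await db.run(
            "INSERT INTO Trx (name, address, amount, category) VALUES (?, ?, ?, ?)",
            [trx.name, trx.address, trx.amount, trx.category]
        );
    }

    // Only now is the query guaranteed to see the inserted rows.
    const rows = await db.all("SELECT * FROM Trx WHERE category = ?", ["credit"]);
    console.log(rows);

    await db.close();
}

main().catch(err => console.error(err));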

Related

nodejs & postgres - SQL getting executed out of order - newbie question, getting unique constraint error

It seems the SQL INSERTs are executing out of order. All I am doing is checking whether a record is present: if it is not present, I write a few rows; if it is present, I update it.
I hope you can answer my newbie question.
Code snippet below.
I expect that if the record is present the code takes the update path, and otherwise it inserts; instead it gives a unique constraint error.
const fs = require("fs");
const pg = require('pg');
const crypto = require("crypto");

// ~~~ GLOBAL DB Connection ~~~
const connection_string = 'postgres://postgres:PostgreSQLXXX@localhost:5432/postgres';
const client = new pg.Client(connection_string);
client.connect();

var input_sql_file = "SQL_sample_input_1_0.txt";
process_SQL_text_file(input_sql_file); // call function

function process_SQL_text_file(ip_file_name)
{
    var is_record_present = false;
    var line_no = 0;
    const readline = require('readline');
    const rl = readline.createInterface({
        input: fs.createReadStream(ip_file_name),
        terminal: false
    });
    // READ & process EACH ROW
    rl.on('line', (line) => {
        line_no++;
        var current_SQL_stripped;
        var Orginal_SQL_text = line;
        var HASH_SHA256 = crypto.createHash('sha256').update(Orginal_SQL_text).digest('hex');
        var SQL_find = `SELECT * from SQL_Summary WHERE SQL_Hash='${HASH_SHA256}'`;
        try
        {
            client.query(SQL_find) // find hash record
                .then((res) => {
                    if ((res.rows == null) || (res.rows.length < 1))
                    {
                        // Insert record TABLE 1
                        var SQL_Insert = `INSERT INTO sql_summary(SQL_text,sql_hash,usage_count) VALUES('${Orginal_SQL_text}','${HASH_SHA256}',1)`;
                        //console.log("SQL_Insert:",SQL_Insert);
                        client.query(SQL_Insert) // Insert row
                            .then((res2) => {
                                var SQL_insert_multi = `INSERT INTO table_names_log (table_name,SQL_hash,usage_count) VALUES('t1','${HASH_SHA256}',1),('t2','${HASH_SHA256}',1),('t3','${HASH_SHA256}',1)`;
                                client.query(SQL_insert_multi);
                            })
                            .finally(() => {
                                console.log("in 'finally:' in INSERT ...");
                            })
                    } else // record present
                    {
                        var SQL_update = `UPDATE sql_summary SET usage_count=usage_count+1 WHERE SQL_hash='${HASH_SHA256}'`;
                        //console.log("SQL_update:",SQL_update);
                        client.query(SQL_update)
                    }
                })
                .finally(() => {
                    console.log("finally block: is_record_present:", is_record_present);
                });
        } catch (err)
        {
            console.log("Catch err", err)
        }
    }) // end of "rl.on('line')"
    rl.on('close', () => {
        console.log("end of file, processed, line:", line_no)
    }); // end of "rl.on('close')"
} // end of function
I suggest that instead of writing your own UPSERT, you use the tools available in the database. For recent versions of Postgres, this would be the ON CONFLICT clause for INSERT (link) or the MERGE command (link).
Without looking all the way through your code: doing UPSERTs by hand is hard, and your code is certainly not safe against races. Between your SELECT and the subsequent INSERT, another line with the same hash can insert first, which is exactly what the unique constraint error is telling you.
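For example, here is a sketch of the check-then-write collapsed into a single atomic statement with ON CONFLICT. It assumes sql_summary has a unique constraint or index on sql_hash, which the unique constraint error suggests it does; query parameters also replace the string interpolation:

// Insert the row, or bump the counter if the hash already exists.
const SQL_upsert = `INSERT INTO sql_summary (sql_text, sql_hash, usage_count)
    VALUES ($1, $2, 1)
    ON CONFLICT (sql_hash)
    DO UPDATE SET usage_count = sql_summary.usage_count + 1`;

client.query(SQL_upsert, [Orginal_SQL_text, HASH_SHA256])
    .catch((err) => console.log("upsert failed", err));

Because the whole thing is one statement, there is no window between the check and the write for another line's INSERT to sneak in.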

How do I get data from mongodb, manipulate it and print it

I want to connect to MongoDB and query a collection based on the filter 'category'. From the results, I want to randomly select one of the entries, concatenate it with another string, and print it to the console. I am a novice at this and can't figure out how to get the results of my query and randomly select one of the entries from the query results.
// Connecting to DB
const mongoose = require('mongoose');
//const { makeCard } = require('./utils/card');
const db = mongoose.connection;
const host = process.env.host;
console.log(host)

const dbupdate = {
    useNewUrlParser: true,
    useUnifiedTopology: true
};

mongoose.connect(host, dbupdate);
db.on('error', (err) => console.log('Error, DB not connected'));
db.on('connected', () => console.log('connected to mongo'));
db.on('disconnected', () => console.log('Mongo is disconnected'));
db.on('open', () => console.log('Connection Made!'));

const Schema = mongoose.Schema;
const cardSchema = new Schema({
    word: String,
    visual: String,
    category: String
});
const Card = mongoose.model('expressions', cardSchema);

I want this function to return the results of the query, but it doesn't:

function getWords(ctgry) {
    const result = Card.find({ "category": ctgry }, (error, results) => {
        return results;
    });
};

function getRandomInt(min, max) {
    min = Math.ceil(min);
    max = Math.floor(max);
    return Math.floor(Math.random() * (max - min) + min);
};

function getWord(ctgry) {
    const result = getWords(ctgry);
    const num = getRandomInt(0, result.length); // this doesn't work
    return result[num];
};

console.log("Sample text: " + getWord());
What you seem to be doing here is that you're fetching all the documents from the Card model for a particular category and picking a random one from it, which I wouldn't recommend. Not sure what data you're storing here, but what if your app grows to a point where there are a million documents in your cards collection? You wouldn't fetch them all in memory and pick a random one.
A good practice while using any database is to fetch only as much data as is required. There's no way to fetch a random document from a query in mongodb from what I know, but there's a way to do this (inspired by this answer, which I recommend you read).
async function getRandomWord(ctgry) {
    // countDocuments is the non-deprecated form of count;
    // the model here is Card, as defined in the question
    const count = await Card.countDocuments({
        "category": ctgry,
    }).exec();
    const random = Math.floor(Math.random() * count);
    return Card.findOne({
        "category": ctgry,
    }).skip(random).exec();
}
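For completeness: MongoDB 3.2+ does ship a native way to pull a random document, the $sample aggregation stage, which avoids the two round trips of count + skip. A sketch with the same Card model:

// Let the server pick the random document in a single query.
async function getRandomWord(ctgry) {
    const docs = await Card.aggregate([
        { $match: { category: ctgry } },
        { $sample: { size: 1 } },
    ]);
    return docs[0]; // undefined if no card matches the category
}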

How to test Hbase Get using Mocha/Node.js

Hi guys, I have a job with a method that extracts data from HBase given a key, in Node.js, something like the following:
async findHbaseData(field1, field2) {
    const key = generateKey(field1, field2);
    const data = await hbase.get(key); // get() returns a promise (see below)
    data['field3'] = data.field3.toUpperCase();
    return data;
}
The HBase get method works as follows:
const HBaseRestClient = require('hbase');
const { Connection } = HBaseRestClient;

this.client = HBaseRestClient(
    this.confHbase // Here I have table name, host and port
);
this.table = this.confHbase.table;

async get(id, columns) {
    let schema = this.getSchema(columns);
    return await new Promise((res, rej) => {
        const row = this.client.table(this.table).row(id);
        if (row) {
            let convert = (err, values, response) => {
                if (err) {
                    rej(err);
                    return;
                }
                const innerModel = Object.create(this.model);
                const result = this.convertRow(values, innerModel, schema, columns);
                res(result);
            }
            if (columns && columns.length) {
                let hbaseColumns = this.schemaToHbaseColumns(columns);
                row.get(hbaseColumns, convert);
            } else {
                row.get(convert);
            }
        }
    });
}
I need to test this method (findHbaseData) using Mocha or Chai, but my test environment (Jenkins) can't access HBase. Is it possible to use Mocha to simulate this access and return fake data, instead of running the real hbase.get(key)?
Thanks!
I just discovered the answer.
I could override HBaseRestClient.prototype.get(id, columns) and have it read the data from a JSON file.
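Something along these lines should work in Mocha. This is only a sketch: the fixture path and its contents are made up, and the patch target depends on where your wrapper class actually defines get, so adjust both to your code layout:

const fs = require('fs');
const assert = require('assert');
const HBaseRestClient = require('hbase');

describe('findHbaseData', () => {
    let realGet;

    before(() => {
        // Swap the real HBase call for one that serves fixture data.
        realGet = HBaseRestClient.prototype.get;
        HBaseRestClient.prototype.get = async (id, columns) =>
            JSON.parse(fs.readFileSync('./test/fixtures/hbase-row.json', 'utf8'));
    });

    after(() => {
        // Put the real implementation back so other tests are unaffected.
        HBaseRestClient.prototype.get = realGet;
    });

    it('upper-cases field3', async () => {
        const data = await findHbaseData('a', 'b');
        assert.strictEqual(data.field3, data.field3.toUpperCase());
    });
});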

pg-promise error when selecting then inserting data

I got an error in my pg-promise code.
This is my snippet:
// create sample payload
let samplePayload = [];
for (let index = 0; index < 10; index++) {
    samplePayload.push({
        id: uuidv1(),
    });
}

return db.task((t) => {
    samplePayload.map(sp => {
        // I want to check if the id exists in the db
        let sql = `select * from sf_data where id = '${sp.id}'`;
        console.log(sql)
        const sampleBatch = t.any(sql).then(th => {
            // I want to insert it if the data does not exist
            console.log(th);
        });
    })
});
I want to check whether each item in the list exists in the DB; if it does not exist, I need to insert the data.
I tried to fix my old code and changed it to this:
const sfdata = await t.any('SELECT * FROM sf_data');
const queries = sfdata.map((sp) => {
    return t.oneOrNone('select * from sf_data where id = ${id}', sp).then(result => {
        console.log(result)
        if (result) {
            t.tx(async t2 => {
                return t2.none("insert into test_sf values ($1, $2, $3)", [uuidv1(), result.id, result.sfid]);
            })
        }
    });
});
return t.batch(queries);
but it returns this error:
(node:6547) UnhandledPromiseRejectionWarning: Error: Client was closed and is not queryable
at /Users/dsa/Telkom/dtp-dsa-middleware-sf/node_modules/pg/lib/client.js:570:27
at processTicksAndRejections (internal/process/task_queues.js:75:11)
Any clue about this?
You are missing return in a few places, and your code creates a bunch of loose promises - queries that are trying to execute outside the connection, hence the error.
Here's the corrected code:
return db.task(t => {
    const queries = samplePayload.map(sp => {
        return t.any('select * from sf_data where id = ${id}', sp);
    });
    return t.batch(queries);
});
Better yet, this can be done with one query (note the :csv filter, which pg-promise uses to format an array for an IN list):
const ids = samplePayload.map(sp => sp.id);
return db.any('select * from sf_data where id in ($1:csv)', [ids]);
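And if each found row still needs the follow-up insert into test_sf, the same shape extends naturally. The key is that every promise is returned into the batch, and no nested t.tx is needed; here is a sketch reusing the asker's tables and uuidv1:

return db.task(t => {
    const queries = samplePayload.map(sp => {
        // Return the whole chain so it executes inside the task's connection.
        return t.oneOrNone('select id, sfid from sf_data where id = ${id}', sp)
            .then(row => {
                if (row) {
                    return t.none('insert into test_sf values ($1, $2, $3)',
                        [uuidv1(), row.id, row.sfid]);
                }
                return null; // id not in sf_data, nothing to insert
            });
    });
    return t.batch(queries);
});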

ES6 Async/Await, ExpressJS and Postgres transactions

REVISED QUESTION
I've revised the question, in the hope of getting a clearer answer.
I'm trying to process data in ExpressJS, based on the incoming req.body and the existing data in the table.
I'm receiving a req.body that contains a JSON list of updated fields. Some of those fields are stored as JSONB in Postgres. If an incoming field is JSONB, then the form (external code) that is making the request has already run a jsonpatch.compare() to generate the list of patches, and it is these patches and not the full values that are being passed in. For any non-JSONB values, incoming values just need to be passed through to the UPDATE query.
I have a working version, as below, that pretends that the existing JSONB values in the table ARE NULL. Clearly, this is NOT what is needed. I need to pull the values from the db. The non-querying-of-current-values version, with a bare-minimum router, looks like this:
const express = require('express')
const bodyParser = require('body-parser')
const SQL = require('sql-template-strings')
const { Client } = require('pg')
const dbConfig = require('../db')
const jsonpatch = require('fast-json-patch')
const FormRouter = express.Router()
I have some update code:
const patchFormsRoute = (req, res) => {
    const client = new Client(dbConfig)
    const { id } = req.body
    const parts = []
    const params = [id]
    // list of JSONB fields for the 'forms' table
    const jsonFields = [
        'sections',
        'editors',
        'descriptions',
    ]
    // list of all fields, including JSONB fields in the 'forms' table
    const possibleFields = [
        'status',
        'version',
        'detail',
        'materials',
        ...jsonFields,
    ]
    // this is a DUMMY RECORD instead of the result of a client.query
    let currentRecord = { 'sections': [], 'editors': [], 'descriptions': [] }
    possibleFields.forEach(myProp => {
        if (req.body[myProp] != undefined) {
            parts.push(`${myProp} = $${params.length + 1}`)
            if (jsonFields.indexOf(myProp) > -1) {
                let val = currentRecord[myProp]
                jsonpatch.applyPatch(val, req.body[myProp])
                params.push(JSON.stringify(val))
            } else {
                params.push(req.body[myProp])
            }
        }
    })
    const updateQuery = 'UPDATE forms SET ' + parts.join(', ') + ' WHERE id = $1'
    client.connect()
    return client
        .query(updateQuery, params)
        .then(result => res.status(200).json(result.rowCount))
        .catch(err => res.status(400).json(err.severity))
        .then(() => client.end())
}

FormRouter.route('/')
    .patch(bodyParser.json({ limit: '50mb' }), patchFormsRoute)

exports.FormRouter = FormRouter
I promise that this is working code, which does almost what I need. However, I want to replace the dummy record with the data already in the table, fetched contemporaneously. My issue is that because multiple clients could be updating a row at the same time (but looking at orthogonal elements of the JSONB values), I need the fetch, calc, and update to happen as a SINGLE TRANSACTION. My plan is to:
1. BEGIN a transaction
2. Query Postgres for the current row value, based on the incoming id
3. For any JSONB fields, apply the patch to generate the correct value for that field in the UPDATE statement
4. Run the UPDATE statement with the appropriate param values (either from the req.body or the patched row, depending on whether the field is JSONB or not)
5. COMMIT the transaction, or ROLLBACK on error
I've tried implementing the answer from @midrizi; maybe it's just me, but the combination of awaits and plain testing of res sends the server off into hyperspace... and ends in a timeout.
In case anyone is still awake, here's a working solution to my issue.
TLDR; RTFM: A pooled client with async/await minus the pooling (for now).
const patchFormsRoute = (req, res) => {
    const { id } = req.body
    // list of JSONB fields for the 'forms' table
    const jsonFields = [
        'sections',
        'editors',
        'descriptions',
    ]
    // list of all fields, including JSONB fields in the 'forms' table
    const possibleFields = [
        'status',
        'version',
        'detail',
        'materials',
        ...jsonFields,
    ]
    const parts = []
    const params = [id]

    ;(async () => {
        const client = new Client(dbConfig)
        await client.connect()
        try {
            // begin a transaction
            await client.query('BEGIN')
            // get the current form data from DB
            const fetchResult = await client.query(
                SQL`SELECT * FROM forms WHERE id = ${id}`,
            )
            if (fetchResult.rowCount === 0) {
                res.status(400).json(0)
                await client.query('ROLLBACK')
            } else {
                const currentRecord = fetchResult.rows[0]
                // patch JSONB values or update non-JSONB values
                let val = []
                possibleFields.forEach(myProp => {
                    if (req.body[myProp] != undefined) {
                        parts.push(`${myProp} = $${params.length + 1}`)
                        if (jsonFields.indexOf(myProp) > -1) {
                            val = currentRecord[myProp]
                            jsonpatch.applyPatch(val, req.body[myProp])
                            params.push(JSON.stringify(val))
                        } else {
                            params.push(req.body[myProp])
                        }
                    }
                })
                const updateQuery =
                    'UPDATE forms SET ' + parts.join(', ') + ' WHERE id = $1'
                // update record in DB
                const result = await client.query(updateQuery, params)
                // commit transaction
                await client.query('COMMIT')
                res.status(200).json(result.rowCount)
            }
        } catch (err) {
            await client.query('ROLLBACK')
            res.status(400).json(err.severity)
            throw err
        } finally {
            client.end()
        }
    })().catch(err => console.error(err.stack))
}
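One caveat worth noting about the solution above (an observation, not part of the original answer): under Postgres's default READ COMMITTED isolation, two clients can still both SELECT the same row, patch it, and have the second UPDATE overwrite the first one's JSONB changes. Locking the row at fetch time serializes the read-modify-write; only the fetch needs to change:

// Lock the row until COMMIT/ROLLBACK so concurrent patches to the
// same form queue up instead of clobbering each other.
const fetchResult = await client.query(
    SQL`SELECT * FROM forms WHERE id = ${id} FOR UPDATE`,
)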
