Hey guys, I can't figure out how to implement a Node.js function that parses data from a CSV file into a PostgreSQL table. Can you help me with this?
I'm trying to use the csv-stream library and can get the data in the console, but I have no idea how to import it into PostgreSQL.
var csvStream = csv.createStream(options);
fs.createReadStream(process.argv[2]).pipe(csvStream)
    .on('error', function (err) {
        console.error(err);
    })
    .on('data', function (data) {
        // outputs an object containing the key/value pairs representing a line found in the CSV file
        // console.log(data);
    })
    .on('column', function (key, value) {
        // outputs the column name associated with the value found
        // console.log('#' + key + ' = ' + value);
        console.log('# ' + value);
    });
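For the import direction, one approach (a minimal sketch, untested against your schema; I am assuming a table emps(firstname, lastname) and matching CSV headers) is to run a parameterized INSERT for every 'data' event:

var pg = require('pg');
var client = new pg.Client('postgres://admin:guest@localhost:5432/Employees');
client.connect();

fs.createReadStream(process.argv[2]).pipe(csvStream)
    .on('data', function (row) {
        // row is the key/value object emitted for each CSV line;
        // the $1/$2 placeholders keep the values safely escaped
        client.query('INSERT INTO emps (firstname, lastname) VALUES ($1, $2)',
            [row.firstname, row.lastname],
            function (err) {
                if (err) console.error(err);
            });
    })
    .on('end', function () {
        client.end(); // note: for large files you would wait for pending inserts first
    });

For bulk loads, COPY ... FROM STDIN (for example via the pg-copy-streams module) is usually faster than row-by-row INSERTs.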
UPDATE
var pg = require("pg");
var conString = "pg://admin:guest#localhost:5432/Employees";
var client = new pg.Client(conString);
client.connect();
var query = client.query("SELECT firstname, lastname FROM emps ORDER BY lastname, firstname");
And now, how can I export the result of this SELECT query to CSV?
UPDATE 0.1
var pg = require('pg');
var csv = require('csv');
var csvWriter = require('csv-write-stream')
var conString = "pg://admin:admin#localhost:5432/labels";
var client = new pg.Client(conString);
client.connect();
var sql = "COPY test TO 'personss.csv' DELIMITER ',' CSV HEADER;"
var query = client.query("SELECT firstname, lastname FROM test ORDER BY lastname, firstname");
query.on("row", function (row, result) {
result.addRow(row);
});
query.on("end", function (result) {
console.log(JSON.stringify(result.rows, null, " "));
client.end();
});
Now I probably need to loop over each row in result.rows.
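One way to finish this (a sketch built on the modules already required above; assuming csv-write-stream's documented API and that fs is required) is to loop over result.rows inside the 'end' handler and pipe the writer to a client-side file:

var fs = require('fs');

query.on('end', function (result) {
    var writer = csvWriter({ headers: ['firstname', 'lastname'] });
    writer.pipe(fs.createWriteStream('persons.csv'));
    result.rows.forEach(function (row) {
        writer.write([row.firstname, row.lastname]); // one CSV line per result row
    });
    writer.end();
    client.end();
});

This writes the file where Node runs, which side-steps the server-side path problem that COPY ... TO has.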
Related
It seems the SQL INSERTs execute out of order. All I am doing is checking whether a record is present; if it is not present I write a few rows, and if it is present I update it.
I hope you can answer my newbie question.
Code snippet below.
I expect the code to take the update path when the record is present and to insert otherwise; instead it throws a unique constraint error.
const fs = require("fs");
const pg = require('pg');
const crypto = require("crypto");

// ~~~ GLOBAL DB Connection ~~~
const connection_string = 'postgres://postgres:PostgreSQLXXX@localhost:5432/postgres';
const client = new pg.Client(connection_string);
client.connect();

var input_sql_file = "SQL_sample_input_1_0.txt";
process_SQL_text_file(input_sql_file); // call function

function process_SQL_text_file(ip_file_name) {
    var is_record_present = false;
    var line_no = 0;
    const readline = require('readline');
    const rl = readline.createInterface({
        input: fs.createReadStream(ip_file_name),
        terminal: false
    });

    // READ & process EACH ROW
    rl.on('line', (line) => {
        line_no++;
        var original_SQL_text = line;
        var HASH_SHA256 = crypto.createHash('sha256').update(original_SQL_text).digest('hex');
        var SQL_find = `SELECT * FROM SQL_Summary WHERE SQL_Hash='${HASH_SHA256}'`;
        try {
            client.query(SQL_find) // find hash record
                .then((res) => {
                    if ((res.rows == null) || (res.rows.length < 1)) {
                        // Insert record TABLE 1
                        var SQL_Insert = `INSERT INTO sql_summary(SQL_text,sql_hash,usage_count) VALUES('${original_SQL_text}','${HASH_SHA256}',1)`;
                        //console.log("SQL_Insert:", SQL_Insert);
                        client.query(SQL_Insert) // Insert row
                            .then((res2) => {
                                var SQL_insert_multi = `INSERT INTO table_names_log (table_name,SQL_hash,usage_count) VALUES('t1','${HASH_SHA256}',1),('t2','${HASH_SHA256}',1),('t3','${HASH_SHA256}',1)`;
                                client.query(SQL_insert_multi);
                            })
                            .finally(() => {
                                console.log("in 'finally:' in INSERT ...");
                            });
                    } else { // record present
                        var SQL_update = `UPDATE sql_summary SET usage_count=usage_count+1 WHERE SQL_hash='${HASH_SHA256}'`;
                        //console.log("SQL_update:", SQL_update);
                        client.query(SQL_update);
                    }
                })
                .finally(() => {
                    console.log("finally block: is_record_present:", is_record_present);
                });
        } catch (err) {
            console.log("Catch err", err);
        }
    }); // end of rl.on('line')

    rl.on('close', () => {
        console.log("end of file, processed lines:", line_no);
    }); // end of rl.on('close')
} // end of function
I suggest that instead of writing your own UPSERT, you use the tools available in the database. For recent versions of Postgres, this would be the ON CONFLICT clause for INSERT (link) or the MERGE command (link).
Without looking all the way through your code: doing UPSERTs by hand is hard, and your SELECT-then-INSERT is certainly not atomic, so two identical lines can both pass the existence check before either insert lands, which is exactly how you get a unique constraint error.
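For example, the SELECT-then-INSERT/UPDATE pair above could collapse into one atomic statement (a sketch against the sql_summary table from the question, assuming a unique constraint on sql_hash; the $1/$2 placeholders also avoid the string-interpolation escaping problem):

var upsert =
    "INSERT INTO sql_summary (sql_text, sql_hash, usage_count) " +
    "VALUES ($1, $2, 1) " +
    "ON CONFLICT (sql_hash) DO UPDATE SET usage_count = sql_summary.usage_count + 1";

client.query(upsert, [original_SQL_text, HASH_SHA256])
    .catch((err) => console.error(err));

Because the whole check-and-write happens inside one statement, out-of-order execution across lines can no longer trip the unique constraint.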
I am creating a Node.js API and am running into one problem; please let me know the solution.
Here is my code:
var searchdataArray = [];
SearchData.forEach(function (item) {
    SearchDataJson.KeyTags = item.hashtags;
    // var d = await globalVar.data.trendingPodData();
    // console.log(d);
    var sql = "SELECT email, mobile FROM user_profile WHERE id=228 LIMIT 2";
    pool.query(sql, function (err, result, fields) {
        var myJSON = JSON.stringify(result);
        var array = JSON.parse(myJSON);
        SearchDataJson.Data = array;
        searchdataArray.push(SearchDataJson);
        console.log(searchdataArray);
    });
});
My requirement is to access the searchdataArray variable outside the pool.query callback.
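Since pool.query is asynchronous, searchdataArray is only complete after every callback has fired; code that runs right after the forEach sees it half-empty. One way to get the finished array (a sketch, assuming a promise-capable pool such as mysql2/promise or node-postgres, whose result shapes differ slightly) is to map each item to a query promise and wait for all of them:

async function buildSearchData(SearchData) {
    var sql = "SELECT email, mobile FROM user_profile WHERE id = 228 LIMIT 2";
    // one promise per item; Promise.all preserves the input order
    var searchdataArray = await Promise.all(SearchData.map(async function (item) {
        var result = await pool.query(sql);
        return { KeyTags: item.hashtags, Data: result.rows }; // result.rows is the node-postgres shape
    }));
    return searchdataArray;
}

buildSearchData(SearchData).then(function (searchdataArray) {
    console.log(searchdataArray); // fully populated here
});

Note also that the original loop pushes the same SearchDataJson object on every iteration; building a fresh object per item, as above, avoids every entry ending up with the last item's values.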
I have an issue where I am not able to get the affected-rows result from the following code.
While debugging I noticed it always crashes at conn.querySync(query.sqlUpdate, params);
console.log is not showing anything either.
What did I do wrong here?
CODE
//imports
const format = require('string-format');
const query = require('../db/query');
const message = require('../common/message');
const constant = require('../common/constant');
var ibmdb = require("ibm_db");
require('dotenv').config();
// access the environment variables for this environment
const database = "DATABASE=" + process.env.DATABASE + ";";
const hostname = "HOSTNAME=" + process.env.HOSTNAME + ";";
const uid = "UID=" + process.env.UID + ";";
const pwd = "PWD=" + process.env.PWD + ";";
const dbport = "PORT=" + process.env.DBPORT + ";";
const protocol = "PROTOCOL=" + process.env.PROTOCOL;
const connString = database+hostname+uid+pwd+dbport+protocol;
function updateContact(params) {
    ibmdb.open(connString, function (err, conn) {
        // blocks until the query is completed and all data has been acquired
        var rows = conn.querySync(query.sqlUpdate, params);
        console.log(rows);
    });
}
module.exports.updateContact = updateContact;
I finally understand what the problem is.
The problem lies in my use of the querySync function: it does not return an affected-row count.
https://github.com/ibmdb/node-ibm_db/blob/master/APIDocumentation.md#querySyncApi
The proper way is to use prepare followed by executeNonQuery.
https://github.com/ibmdb/node-ibm_db/blob/master/APIDocumentation.md#executeNonQueryApi
So, following the API docs, I modified my code:
...
conn.prepare(query.SQL_UPDATE, function (err, stmt) {
    if (err) {
        console.log(err);
        return conn.closeSync();
    }
    stmt.executeNonQuery(params, function (err, result) {
        if (err) {
            console.log(err);
        } else {
            console.log("Affected rows = " + result);
        }
        // Close the connection
        conn.close();
    });
});
...
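As a follow-up: if updateContact needs to hand the count back to its caller, one option (a sketch using the same ibm_db calls as above) is to wrap the flow in a promise:

function updateContact(params) {
    return new Promise(function (resolve, reject) {
        ibmdb.open(connString, function (err, conn) {
            if (err) return reject(err);
            conn.prepare(query.SQL_UPDATE, function (err, stmt) {
                if (err) {
                    conn.closeSync();
                    return reject(err);
                }
                stmt.executeNonQuery(params, function (err, result) {
                    conn.close(function () {}); // close regardless of outcome
                    if (err) return reject(err);
                    resolve(result); // the affected-row count
                });
            });
        });
    });
}

// Caller: updateContact(params).then(function (count) { console.log(count); });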
I'm trying to get the hang of Node (I mainly use Python), so I'm working on a small project that reads and writes data to a SQLite database.
I am having no issue writing to the database, luckily, but I cannot seem to get queries to work at all. I've tested the queries in the sqlite terminal and they succeed.
So far, I have something like:
const fs = require("fs");
const util = require("util");
const sqlite = require("sqlite");
const Promise = require("bluebird")
// const DATABASE = ":memory:";
const DATABASE = "./database.sqlite";
function insertDataIntoDatabase(transactions, db) {
    // Write each transaction into the database.
    let sqlStatement = "INSERT INTO Trx \
        (name, address, amount, category) \
        VALUES ";
    for (var i = 0; i < transactions.length; ++i) {
        let trx = transactions[i];
        sqlStatement += util.format(
            "('%s', '%s', %d, '%s'), ",
            trx.name,
            trx.address,
            trx.amount,
            trx.category
        );
    }
    sqlStatement = sqlStatement.substring(0, sqlStatement.length - 2);
    db.then(db => db.run(sqlStatement))
        .catch((err) => console.log(err));
}

function getTransactions(db, category) {
    // Return an array of valid transactions of a given category.
    let where = "";
    if (category) {
        where = util.format("WHERE category='%s'", category);
    }
    let sqlStatement = util.format("SELECT * from Trx %s", where);
    sqlStatement = "SELECT * from Trx"; // Trying to figure out what's happening
    console.log(sqlStatement);
    db.then(db => {
        db.all(sqlStatement)
            .then((err, rows) => {
                console.log(rows); // undefined
                console.log(err); // []
            });
    });
}

// Set up the db connection
const db = sqlite.open(DATABASE, { cached: true })
    .then(db => db.migrate({ force: 'last' }));

// Read transactions and write them to the database
fs.readFile("transactions.json", "utf8", (err, data) => {
    let transactions = JSON.parse(data).transactions;
    insertDataIntoDatabase(transactions, db);
});

// Get transaction data
getTransactions(db, 'credit');

// Close connection to DB
db.then(db => db.close());
Looking at this again, I think the issue is the async nature of Node. The query itself was succeeding, but at that point in time I had not yet inserted the data from the JSON file into the database, hence the empty result.
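A way to enforce that ordering (a sketch assuming the promise-style sqlite v2 API used above) is to await the inserts before querying; note that db.all resolves with the rows directly, not an (err, rows) pair:

const fs = require("fs");
const util = require("util");
const sqlite = require("sqlite");

async function main() {
    const db = await sqlite.open("./database.sqlite", { cached: true });
    await db.migrate({ force: 'last' });

    // insert first...
    const data = await util.promisify(fs.readFile)("transactions.json", "utf8");
    for (const trx of JSON.parse(data).transactions) {
        await db.run(
            "INSERT INTO Trx (name, address, amount, category) VALUES (?, ?, ?, ?)",
            trx.name, trx.address, trx.amount, trx.category);
    }

    // ...then query; rows is the resolved value, there is no error argument
    const rows = await db.all("SELECT * FROM Trx WHERE category = ?", "credit");
    console.log(rows);

    await db.close();
}

main().catch(console.error);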
I am new to Node and writing a small application. I haven't used a language this asynchronous on the server before, and I have gotten myself into a bit of a pickle. I need to take a string, query a table for an id, insert into a second table using the result, and then return a string from the function two levels up. I have a custom DAO I use for the db work. Here is the function where it all happens:
function generateToken(data, userId, client) {
    var random = Math.floor(Math.random() * 100001);
    var sha256 = crypto.createHmac("sha256", String(random)); // the HMAC key must be a string or buffer
    var token = sha256.update(data).digest("base64");
    var query = dao.select(
        'auth.apps',
        {
            name: client.name,
            version: client.version,
            subversion: client.subversion,
            patch: client.patch
        }
    ).done(
        function (result) {
            dao.insert(
                'auth.tokens',
                {
                    user_id: userId,
                    app_id: result.rows[0].id,
                    token: token
                }
            );
            return "mmmm yellllo";
        }
    );
    var ret_val = await(query);
    console.log("Token return: " + ret_val);
    return ret_val;
}
and here is the relevant part of my dao for select:
dbo.prototype.select = function (table, where, order_by) {
    var where_clause = this.construct_where(where);
    var sql = 'SELECT * FROM ' + table + ' WHERE ' + where_clause;
    if (order_by !== undefined) {
        sql = sql + ' ORDER BY ' + order_by;
    }
    var result = this.pool.query(sql);
    return result;
};
and insert:
dbo.prototype.insert = function (table, values) {
    var key_list = '', value_list = '';
    for (var k in values) {
        key_list = key_list + ', ' + k;
        value_list = value_list + ", '" + values[k] + "'";
    }
    // chop off the leading comma and space
    key_list = key_list.substring(2);
    value_list = value_list.substring(2);
    var sql = 'INSERT INTO ' + table + '(' + key_list + ') VALUES(' + value_list + ') RETURNING id';
    var result = this.pool.query(sql).catch(function (error) {
        console.log("SQL: " + sql + " error: " + error);
    });
    return result;
};
How do I unwind the double promise? I want the generateToken function to return the token variable, but only after the insert query has finished.
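For what it's worth, since dao.select and dao.insert already return promises, one way to express this with no extra library (a sketch over the asker's custom DAO) is to return the chained promise and resolve it with the token only after the insert:

function generateToken(data, userId, client) {
    var random = Math.floor(Math.random() * 100001);
    var token = crypto.createHmac("sha256", String(random)).update(data).digest("base64");

    return dao.select('auth.apps', {
            name: client.name,
            version: client.version,
            subversion: client.subversion,
            patch: client.patch
        })
        .then(function (result) {
            // the insert returns a promise, so returning it here
            // makes the next .then wait for it to finish
            return dao.insert('auth.tokens', {
                user_id: userId,
                app_id: result.rows[0].id,
                token: token
            });
        })
        .then(function () {
            return token; // resolves only after the insert has finished
        });
}

// Caller:
// generateToken(data, userId, client).then(function (token) { console.log(token); });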
There is a library named deasync, and the motivation for creating it was to handle situations where an "API cannot be changed to return merely a promise or demand a callback parameter".
So this is the primary, and probably the only, use case, because in general Node.js should stay async.
To do the trick you basically write a function that accepts a callback and then wrap it with deasync, as follows:
var deasync = require('deasync');

// It can still take the params before the callback
var asyncGenerateToken = function (data, userId, client, callback) {
    var token = 'abc';
    // Async operation starts here
    setTimeout(function () {
        // Async operation is finished, now we can return the token
        // Don't forget that the error is the 1st arg, data is the 2nd
        callback(null, token);
    }, 1000);
};

var generateToken = deasync(asyncGenerateToken);

// We'll retrieve a token only after a second of waiting
var token = generateToken('my data', 'my user id', 'my client');
console.log(token);
Hope this helps.