Insertion of data to PostgreSQL using Google Trends API - node.js

I have fetched data from Google Trends API and now I am trying to insert it in the database but at a time only one row is being inserted and I want to insert all the rows into the database.
try {
var res = await googleTrends.interestByRegion({keyword:keyword,geo:"US-"+state}); //resolution:keyword
res = JSON.parse(res);
// for debugging.
//console.log(res);
if("default" in res){
if("geoMapData" in res["default"]){
for(var x in res["default"].geoMapData){
var row = res["default"].geoMapData[x];
if("geoName" in row && "value" in row){
// console.log(" - " + row.geoName + "\t\t" + row.value);
console.log(">>>>>>>>>>>>>>>>>>",x,row.geoName + row.value)
//insert statement
var name1 = [row.geoName]
// name1.push([keyword,row.geoName])
// var name2 = [row.geoName]
pool.query("INSERT INTO practice(state,keyword) VALUES($1)",name1,(err, res) => {
console.log(err, res);
// pool.end(() => {
// console.log('pool has ended')
//})
});
I am getting this error after compiling.
error: INSERT has more target columns than expressions
at Connection.parseE (/home/abc/Desktop/node2/node_modules/pg/lib/connection.js:604:13)
at Connection.parseMessage (/home/abc/Desktop/node2/node_modules/pg/lib/connection.js:403:19)
at Socket.<anonymous> (/home/abc/Desktop/node2/node_modules/pg/lib/connection.js:123:22

Related

Cannot use “undefined” as a Firestore value

I am trying to retrieve data from my Firestore database using node.js, I want to collect a field from one Firestore query and pass the value into another Firestore query but I keep getting this error in my logs, the first Firestore query successfully retrieves data, but my problem is passing a value to the second query
Error: Value for argument "value" is not a valid query constraint. Cannot use "undefined" as a Firestore value. If you want to ignore undefined values, enable `ignoreUndefinedProperties`. at Object.validateUserInput (/workspace/node_modules/#google-cloud/firestore/build/src/serializer.js:271:19) at validateQueryValue (/workspace/node_modules/#google-cloud/firestore/build/src/reference.js:2048:18) at CollectionReference.where (/workspace/node_modules/#google-cloud/firestore/build/src/reference.js:988:9) at step2 (/workspace/index.js:74:43) at /workspace/index.js:65:17 at QuerySnapshot.forEach (/workspace/node_modules/#google-cloud/firestore/build/src/reference.js:748:22) at updateBets (/workspace/index.js:60:22) at processTicksAndRejections (internal/process/task_queues.js:97:5)
here is my code
async function updateBets() {
var marketRef = db.collection('matches');
var snapshot = await marketRef.where('matchStatus', '==', 'FINISHED').get();
if (snapshot.empty) {
console.log('No matching documents.');
return;
}
console.log('I found documents');
snapshot.forEach(doc => {
step2();
async function step2() {
var marketRef2 = db.collection('markets');
var snapshot2 = await marketRef2.where('marketId', '==', doc.data().matchId).get();
console.log(doc2.id, '=>', doc2.data());
snapshot2.forEach(doc2 => {
console.log(doc2.id, '=>', doc2.data());
if (doc2.data().marketTitleId == 'FULL_TIME_RESULT') {
var a = doc.data().homeTeamScore;
var b = doc.data().awayTeamScore;
var winnerIndex;
if (a > b) {
winnerIndex = 0;
var resultIndex = ['WINNER', 'LOSER', 'LOSER'];
var docName = `${doc.data().matchId}` + '000' + '1';
var sfRef = db.collection('markets').doc(docName);
batch5.update(sfRef, {
results: resultIndex
});
} else if (a == b) {
winnerIndex = 1;
var docName = `${doc.data().matchId}` + '000' + '1';
var resultIndex = ['LOSER', 'WINNER', 'LOSER'];
var sfRef = db.collection('markets').doc(docName);
batch5.update(sfRef, {
results: resultIndex
});
} else if (a < b) {
winnerIndex = 2;
var docName = `${doc.data().matchId}` + '000' + '1';
var resultIndex = ['LOSER', 'LOSER', 'WINNER'];
var sfRef = db.collection('markets').doc(docName);
batch5.update(sfRef, {
results: resultIndex
});
}
}
})
}
});
batch5.commit().then(() => {
console.log("im done with results");
}).catch((err) => {
console.log('Mac! there was an error with results: ', err);
});
}
You could try:
const data = doc.data();
const matchId = data.matchId;
and then put matchId into query.
Also log the "matchId" variable to see the value.

Node.js call back function on termination of a user defined function

I have a node.js app consisting of a timer calling a user defined function made up of a bunch of functions in node language. The calling script has a timer calling function mybuy() every 10 seconds; mybuy() buys crypto currencies using Binance api according trigger prices contained in a mySQL table (alarms). I would like to start mysell() (not shown , but similar to myBuy()) right after mybuy() has run its course.
How to make mysell() the callback function of mybuy()?
This the calling script:
var fs = require('fs');
var sl = require('alberto/buy');
var loop = 0;
setImmediate(() => {
// start the log
fs.appendFile('./log.txt', "\n Loop-> " + loop + "\n", function (err) {
if (err) { console.log(err); }
})
//execute the function
sl.mybuy(); // USD function; everything happens here.Can take long to finish
var myInt = setInterval(function () {
loop++;
fs.appendFile('./log.txt', "Loop-> " + loop + "\n", function (err) {
if (err) { console.log(err); }
})
//execute every 10 secs
sl.mybuy();
if (loop > 5) { clearInterval(myInt); } // max 6 loops for testing
}, 10000);
});
the UDF id here
exports.mybuy = function () {
var fs = require('fs'); // I keep a log.txt
process.stdout.write("\u001b[2J\u001b[0;0H");// clear screen
aww = (new Date()).toJSON().slice(0, 19).replace(/[-T]/, '-');
aww = aww.replace(/T/, ' ');
console.log(aww, '\n\n'); // practicing with dates
var mysql = require('mysql');
var con = mysql.createConnection({
host: "www.photobangkok.com",
user: "photoban_user",
password: "xxxxxxxx",
database: "photoban_datab"
});
// 'added' is for never processed entries in alarms table.It will change to BOUGHT or SOLD
sql = "SELECT rec, id,coin,buy,amount_b,stat FROM alarms where stat='added' AND buy>0 order by coin";
var cnt = 0; // not used, perhaps an idea to emit an event when cnt reaches the number of rows
con.query(sql, function (err, result) {
if (err) throw err;
str = "";
for (var index in result) {
str = result[index].rec + "-" + result[index].id + "-" + result[index].coin + "-" + result[index].buy + "-" + result[index].amount_b + "-" + result[index].stat;
// set up variables
coin = result[index].coin;
buy = result[index].buy;
rec = result[index].rec;
id = result[index].id;
amount = result[index].amount_b;
console.log('\x1b[36m%s\x1b[0m', str); // set color green. Display str
checkprice(coin, buy, rec, id, amount); //check Binance today price for the coin.The function will execute sometimes
} // end of loop
console.log('\x1b[36m%s\x1b[0m', str); // set color green. Display str
});
//check single coin price using binance api
function checkprice(coin, buy, rec, id, amount) {
const binance = require('node-binance-api')().options({
APIKEY: '<key>',
APISECRET: '<secret>',
useServerTime: true,
test: true //sandbox does not work
});
binance.prices(coin, (error, ticker) => {
act = "Nothing"; // default value
pricenow = ticker[coin]; // note ticker[coin]
if (pricenow < buy) {
show(id, rec, coin, buy, amount, pricenow);// Display sometimes then call book()
} else { console.log(coin, pricenow, buy, act, '\n'); }
});
}
function show(id, rec, coin, buy, amount, pricenow) {
delta = buy - pricenow; // posted trigger - today price
delta = delta.toFixed(8);
console.log('\x1b[31m%s\x1b[0m', coin, buy, amount, id, rec, ">BUY", delta); //display entries from alarms higher that today price
book(id, rec, coin, buy, amount, pricenow);
}
// dummy function to be replaced with a buy api order
function book(id, rec, coin, buy, amount, pricenow) {
const binance = require('node-binance-api')().options({
APIKEY: '<key>',
APISECRET: '<secret>',
useServerTime: true,
test: true //sandbox
});
console.log("Order:buy what??", coin, "amount:", amount, '\n');
/* binance.prices(coin, (error, ticker) => {
console.log("booking",coin, ticker[coin]);
update(id,rec);
}); */
update(id, rec, amount); // update mySql table. Slow but sure
}
function update(id, rec, amount) {
var sql = "UPDATE alarms SET stat = 'BOUGHT' ,today =
CONVERT_TZ(now(), '+00:00', '+7:00') WHERE id = "+id+" AND rec = "+rec;
con.query(sql, function (err, result) {
if (err) throw err;
console.log(result.affectedRows + " record updated");
// keep a log.tx
fs.appendFile('./log.txt', aww + " bought " + id + "-" + rec + "-" + amount + "\n",
function (err) {
if (err) { console.log(err); }
})
});
}
// I could check if all rows are done and raise an event? (how to do it)
} // end
To make mySell as the callback method of myBuy, invoke the myBuy method using the following structure.
myBuy(() => {
// operation of mySell method
});
And your myBuy method should return the callback, after perform its own operation.
exports.myBuy = function(cb) {
// operation of myBuy method
return cb; // return to the mySell method
}

Nested Promises in node.js and pg

I am new to node and writing a small application. I haven't used a language as asynchronous as this on the server before and have myself in a bit of a pickle. I need to take a string, query a table for an id, then insert in a second table using the result, then return a string from the funtion two levels up. I have a custom dao I use for the db stuff. Here is the function where it all happens:
function generateToken(data, userId, client) {
var random = Math.floor(Math.random() * 100001);
var sha256 = crypto.createHmac("sha256", random );
var token = sha256.update(data).digest("base64");
var query = dao.select(
'auth.apps',
{
name: client.name,
version: client.version,
subversion: client.subversion,
patch: client.patch
}
).done(
function(result) {
dao.insert(
'auth.tokens',
{
user_id:userId,
app_id: result.rows[0].id,
token:token
}
);
return "mmmm yellllo";
}
);
var ret_val = await(query);
console.log("Token return: " + ret_val);
return ret_val;
}
and here is the relevant part of my dao for select:
dbo.prototype.select = function(table, where, order_by) {
var where_clause = this.construct_where(where);
var sql = 'SELECT * FROM ' + table + ' WHERE ' + where_clause;
if(order_by !== undefined) {
sql = sql + ' ORDER BY ' + order_by;
};
var result = this.pool.query(sql);
return result;
};
and insert:
dbo.prototype.insert= function(table, values) {
var key_list='', value_list = '';
for( var k in values)
{
key_list = key_list + ', ' + k;
value_list = value_list + ", '" + values[k] + "'";
}
// chop off comma space
key_list = key_list.substring(2);
value_list = value_list.substring(2);
var sql = 'INSERT INTO ' + table + '(' + key_list + ') VALUES(' + value_list + ') RETURNING id';
var result = this.pool.query(sql).catch(function(error) {
console.log("SQL:" + sql + " error:" + error);
});
return result;
};
How do unwind the double promise. I want the generateToken function to return the token variable but only after the insert query has finished.
There is a library named deasync.
And the motivation to create it was to solve the situations when
API cannot be changed to return merely a promise or demand a callback
parameter
So this is the primary and probably the only use case. Because in general Node.js should stay async.
To do the trick you basically should write a function that accepts a callback and then wrap it with deasync as follows:
var deasync = require('deasync');
//It can still take the params before the callback
var asyncGenerateToken = function (data, userId, client, callback) {
var token = 'abc';
//Async operation starts here
setTimeout(function () {
//Async operation is finished, now we can return the token
//Don't forget that the error is 1st arg, data is the 2nd
callback(null, token);
}, 1000);
};
var generateToken = deasync(asyncGenerateToken);
//We'll retrieve a token only after a second of waiting
var token = generateToken('my data', 'my user id', 'my client');
console.log(token);
Hope this helps.

csv table to postgresql db with nodejs

Hey guys i cant implement the function in nodejs what can parse data from csv to postgresql table. Can u help with this question?
I'm trying to use the csv-stream library and can get the datas in console, but how to do an import to postgresql no idea.
var csvStream = csv.createStream(options);
fs.createReadStream(process.argv[2]).pipe(csvStream)
.on('error', function (err) {
console.error(err);
})
.on('data', function (data) {
// outputs an object containing a set of key/value pair representing a line found in the csv file.
// console.log(data);
})
.on('column', function (key, value) {
// outputs the column name associated with the value found
// console.log('#' + key + ' = ' + value);
console.log('# ' + value);
})
UPDATE
var pg = require("pg");
var conString = "pg://admin:guest#localhost:5432/Employees";
var client = new pg.Client(conString);
client.connect();
var query = client.query("SELECT firstname, lastname FROM emps ORDER BY lastname, firstname");
and now how i can get this select query to csv?
UPDATE 0.1
var pg = require('pg');
var csv = require('csv');
var csvWriter = require('csv-write-stream')
var conString = "pg://admin:admin#localhost:5432/labels";
var client = new pg.Client(conString);
client.connect();
var sql = "COPY test TO 'personss.csv' DELIMITER ',' CSV HEADER;"
var query = client.query("SELECT firstname, lastname FROM test ORDER BY lastname, firstname");
query.on("row", function (row, result) {
result.addRow(row);
});
query.on("end", function (result) {
console.log((result.rows, null, " "));
client.end();
});
Now probably i need to do each cycles in result.row.

Azure Node.js Table Entity Update

I'm trying to update a table entity during data insertion using server Node script. Here is what I'm trying to do. I have a Address table which has a column named geolocation of type "geography". When a user updates the Address, I'm using npm-geocoder to get the latitude and longitude to update the geolocation column.
Here is the code snippet.
var table = module.exports = require('azure-mobile-apps').table();
table.dynamicSchema = true;
table.insert(function (context) {
var address = context.item.lines1 + ' ' + context.item.lines2 + ' ' + context.item.city + ' ' + context.item.state + ' ' + context.item.zip;
var geocoderProvider = 'google';
var httpAdapter = 'https';
var extra = {
apiKey: '',
formatter: null
};
var geocoder = require('node-geocoder')(geocoderProvider, httpAdapter, extra);
geocoder.geocode(address)
.then(function(res) {
var geolocation = "POINT("+ res[0].longitude + " " + res[0].latitude +")";
console.log("Value of Geolocation is ", geolocation);
context.item.geolocation = geolocation;
})
.catch(function(err) {
console.log("Error ", err);
});
return context.execute();
});
However, i don't see the table being updated with the geolocation. Any pointers?
I looked at few samples available online but they are mostly based on previous mobile services where the insert method signature is different for ex:-
function insert(item, user, request) {
var queryString = "INSERT INTO Place (title, description, location) VALUES (?, ?, geography::STPointFromText('POINT(' + ? + ' '
+ ? + ')', 4326))";
mssql.query(queryString, [item.title, item.description, item.longitude.toString(), item.latitude.toString()], {
success: function() {
request.respond(statusCodes.OK, {});
}
});
}
context.execute() is being called before the geocode() promise is resolved. Move context.execute() inside the callback and return the promise from the function...
table.insert(function (context) {
// ...
var geocoder = require('node-geocoder')(geocoderProvider, httpAdapter, extra);
return geocoder.geocode(address)
.then(function(res) {
var geolocation = "POINT("+ res[0].longitude + " " + res[0].latitude +")";
console.log("Value of Geolocation is ", geolocation);
context.item.geolocation = geolocation;
return context.execute();
})
.catch(function(err) {
console.log("Error ", err);
});
});
Per my experience, I think you can try to use the update operation instead of the insert operation for the table to update an existed object. And if the table had the column named geolocation, the dynamicSchema property enabled seems to be not necessary for the table.
Meanwhile, you can try to check the table access property for the update operation, please see the setions How to: Require Authentication for access to tables & How to: Disable access to specific table operations of the doc https://azure.microsoft.com/en-us/documentation/articles/app-service-mobile-node-backend-how-to-use-server-sdk/.

Resources