Hi, can anyone give an example of how to use an INSERT statement in Node.js? I am able to run SELECT queries, but for an INSERT query I get [] as the result. No error is shown, yet the values are not added to the table. I am using DB2, ibm_db, Express, Node.js, and AngularJS.
I wrote a blog entry on using DB2 and node.js on Bluemix a while ago. It includes code for an INSERT statement.
As part of the insert, first prepare the statement, then bind the values to be inserted, and finally execute the statement.
Here is the relevant code snippet, the full context is in the blog:
exports.insertIP = function(ibmdb, connString, ipinfo) {
  console.log("insertIP called", ipinfo);
  ibmdb.open(connString, function(err, conn) {
    if (err) {
      // no response object is in scope here, so just log the error
      console.error("error occurred " + err.message);
    } else {
      // prepare the SQL statement
      conn.prepare("INSERT INTO IP.VISITORS(vtime,ip,country_code,country,region_code,region,city,zip,latitude,longitude,metro,area) VALUES (current timestamp,?,?,?,?,?,?,?,?,?,?,?)", function(err, stmt) {
        if (err) {
          // could not prepare for some reason
          console.log(err);
          return conn.closeSync();
        }
        // bind and execute the statement asynchronously
        stmt.execute([ipinfo["ip"], ipinfo["country_code"], ipinfo["country_name"], ipinfo["region_code"], ipinfo["region_name"], ipinfo["city"], ipinfo["zipcode"], ipinfo["latitude"], ipinfo["longitude"], ipinfo["metro_code"], ipinfo["area_code"]], function(err, result) {
          if (err) console.log(err);
          // close the connection to the database
          conn.close(function() {
            console.log("Connection Closed");
          });
        });
      });
    }
  });
};
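For illustration, a hypothetical call to this function might look like the following; the module name ipdb, the connection string, and the ipinfo values are all placeholders, with the ipinfo fields matching what insertIP binds:

var ibmdb = require("ibm_db");
var ipdb = require("./ipdb"); // hypothetical module exporting insertIP
var connString = "DATABASE=BLUDB;HOSTNAME=hostname;PORT=50000;PROTOCOL=TCPIP;UID=user;PWD=xxx;";

// sample geo-IP record (made-up values)
ipdb.insertIP(ibmdb, connString, {
  ip: "192.0.2.1", country_code: "US", country_name: "United States",
  region_code: "CA", region_name: "California", city: "San Jose",
  zipcode: "95141", latitude: 37.33, longitude: -121.89,
  metro_code: "807", area_code: "408"
});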
As one of the members of node-ibm_db, I would suggest following the node-ibm_db GitHub repository (https://github.com/ibmdb/node-ibm_db); we have updated the README document as well as the list of APIs for particular tasks.
For your query you can use the ".prepare(sql, callback)" or ".prepareSync(sql)" API (async or sync, per your requirements). Below is a code snippet and the URL of the documentation for this API.
var ibmdb = require("ibm_db"),
    cn = "DATABASE=dbname;HOSTNAME=hostname;PORT=port;PROTOCOL=TCPIP;UID=dbuser;PWD=xxx";

ibmdb.open(cn, function(err, conn) {
  if (err) return console.log(err);
  conn.prepare("insert into hits (col1, col2) VALUES (?, ?)",
    function(err, stmt) {
      if (err) {
        // could not prepare for some reason
        console.log(err);
        return conn.closeSync();
      }
      // bind and execute the statement asynchronously
      stmt.execute(['something', 42], function(err, result) {
        if (err) console.log(err);
        else result.closeSync();
        // close the connection
        conn.close(function(err) {});
      });
    });
});
API documentation(Github URL) : https://github.com/ibmdb/node-ibm_db#-8-preparesql-callback
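For reference, a minimal sketch of the synchronous variant, assuming the same connection string (cn) and table as above:

var ibmdb = require("ibm_db");
var conn = ibmdb.openSync(cn);

// prepare, execute, and clean up synchronously
var stmt = conn.prepareSync("insert into hits (col1, col2) VALUES (?, ?)");
var result = stmt.executeSync(['something', 42]);
result.closeSync();
conn.closeSync();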
Try installing node-jt400 using the command below:
npm install node-jt400 --save
Then use the code below to insert data into a table named foo.
Follow the link https://www.npmjs.com/package/node-jt400 for details.
pool
  .insertAndGetId('INSERT INTO foo (bar, baz) VALUES(?,?)', [2, 'b'])
  .then(id => {
    console.log('Inserted new row with id ' + id);
  });
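For completeness, the pool above is created from your IBM i connection details, per the node-jt400 README (host, user, and password here are placeholders):

const pool = require('node-jt400').pool({
  host: 'myhost',     // placeholder
  user: 'myuser',     // placeholder
  password: 'secret'  // placeholder
});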
I am using Solr as my database and I have to delete all records where (UUID: 'a7d37405-fd5b-44de-b4bc-e748e9353f5d' AND name: 'Murtaza'). I can delete using just one field, but it does not let me delete using two where clauses. Is there any way to do this?
I am using this code:
var objQuery = {UUID: 'a7d37405-fd5b-44de-b4bc-e748e9353f5d'};
client.delete(objQuery, function(err, result) {
  if (err) {
    console.log(err);
    return;
  }
  client.softCommit();
  callback(result.responseHeader);
});
The built-in delete function only allows one field+value pair as an argument, but you could use the deleteByQuery option:
http://lbdremy.github.io/solr-node-client/code/deleteByQuery.js.html
And make the query a conditional search for both fields:
var query = "UUID:'abcd-1234' AND name:Murtaza";
I finally found the solution! You have to use the deleteByQuery function. The code is:
var client = solr.createClient(options);
var query = 'UUID:814caeda-5ef1-4ecf-8260-222060d9907c AND toolId:mytoolID';
client.deleteByQuery(query, function(err, obj) {
  if (err) {
    console.log(err);
  } else {
    client.softCommit();
    callback(obj.responseHeader);
  }
});
I ran into an annoyance when trying to use simple functions to query data for my web app. The idea is to use one function to list the contents of one table, and a second function that takes the user-selected record id from that table and queries the detailed contents from another table.
When running, the app executed both functions without any error but got no data. I checked the console and found that the second function's query result was null (I logged the inputs to the second function and confirmed it received the correct query keys), and I am sure the database has the data.
I tried:
using the psql command line to query with the same keys; I got the results without problem;
running the two functions from a node command line, supplying the query keys; this also gave me the correct results.
So the functions should work. My question is: why, when they run inside the app, do they get no query results?
I am using pg = require("pg") and const pool = new pg.Pool(config) for the database connection.
Your sharing of your experience will be much appreciated.
(UPDATE) The functions are shown below:
function listItemDB(callback) {
  pool.connect(function(err, client, done) {
    if (err) {
      return console.error('error fetching client from pool', err);
    }
    // use the client to execute the query
    client.query(`SELECT * FROM basicdb.items`,
      function(err, result) {
        // call `done(err)` to release the client back to the pool (or destroy it if there is an error)
        done(err);
        if (err) {
          return console.error('error running query', err);
        }
        // console.log(result.rows);
        callback(result.rows);
      });
  });
}
The above function only fetches "item1" and "dataset1" for later use and passes them as arguments to the function below. It does its job perfectly.
function getFileName(itemName, datasetName, callback) {
  let fileName;
  console.log(itemName, datasetName);
  pool.connect(function(err, client, done) {
    if (err) {
      return console.error('error fetching client from pool', err);
    }
    client.query("SELECT * " +
      "FROM basicdb.dataset " +
      "INNER JOIN basicdb.items " +
      "ON basicdb.dataset.item_id = basicdb.items.item_id " +
      "WHERE (basicdb.items.item_name = ($1)) " +
      "AND (basicdb.dataset.datasetname = ($2))", [itemName, datasetName],
      function(err, result) {
        done();
        if (err) {
          return console.error('error running query', err);
        }
        let records = result.rows;
        fileName = records[records.length - 1].filename;
        callback(fileName);
      });
  });
}
This function retrieves the filename so the main app can use it. The code that calls it in my main app.js looks like this:
db.getFileName("item1", "dataset1", function(fileName) {
  // do something with the fileName...
});
("db" is the module name that includes the functions.)
I finally found the problem. It is a low-level mistake that has nothing to do with the database or the queries.
The item names coming from the dropdown list in the app, which were fed to the function arguments, had a single space appended to the end (I don't know why), which never equals the record in the database, so the app never got query results. In my function tests I had hardcoded the item name, which did match the record. And since the stray character is a space, even when I console.log(itemName) I could not see it.
It turned out to be a mistake of a single space.
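A simple guard against this class of bug is to trim user-supplied keys before they reach the query. A sketch, where rawItemName and rawDatasetName are hypothetical variables holding the raw UI values:

// normalize UI-supplied keys before binding them as query parameters
function normalizeKey(value) {
  return String(value).trim();
}

db.getFileName(normalizeKey(rawItemName), normalizeKey(rawDatasetName), function(fileName) {
  // use fileName...
});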
I'm trying to implement cursor pagination with PostgreSQL and Sequelize. I have had success creating a cursor WITH HOLD in a transaction and fetching data from it:
sequelize.transaction(function(t) {
  const query = "DECLARE my_cur CURSOR WITH HOLD FOR SELECT foo, bar FROM x " +
    "WHERE time='2016-09-16'::date;";
  return sequelize.query(query, {transaction: t}).spread(function(results, metadata) {
    console.log("Cursor created!");
  });
}).then(function(result) {
  console.log(result);
  console.log("Transaction committed");
  sequelize.query("FETCH NEXT FROM my_cur").spread(function(results, metadata) {
    console.log("Fetching from previously created cursor:");
    console.log(results);
    console.log(metadata);
  }).catch(function(err) {
    console.log("Failed to fetch from cursor");
    console.log(err);
  });
}).catch(function(err) {
  console.log("Failed to create a cursor");
  throw err;
});
If I try fetching from the cursor in a different session I get:
FETCH NEXT FROM my_cur;
ERROR: cursor "my_cur" does not exist
Cursors, even if declared WITH HOLD, are destroyed when their session closes and aren't shared between sessions. My question is: how does Sequelize handle sessions with PostgreSQL? Will I be able to FETCH from this cursor on a separate API call?
socket.on("fetch_next_page", function() {
  var cursor = socket.session.cursor;
  var pageSize = 10;
  sequelize.query("FETCH " + pageSize + " FROM my_cur").spread(function(results, metadata) {
    console.log("Fetching from previously created cursor:");
    console.log(results);
    socket.emit("page", results);
    console.log(metadata);
  }).catch(function(err) {
    console.log("Failed to fetch from cursor");
    console.log(err);
  });
});
The easiest way to execute a sequence of queries within the same session is via a task or transaction as implemented within pg-promise.
See Tasks and Transactions.
You would either promise-chain your queries, if they have a dependency between them, or execute them as a batch, if they don't.
And if you need to iterate N times inside the session, like calling FETCH until a certain condition is met, you can also make use of the method sequence, as sketched below.
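A minimal sketch of that approach, assuming a pg-promise db object and the cursor/query from the question (the connection string is a placeholder):

const pgp = require('pg-promise')();
const db = pgp('postgres://user:pass@localhost:5432/mydb'); // placeholder connection

db.task(t => {
  // every query in this task runs on the same connection/session,
  // so the cursor declared here stays visible to the FETCH calls below
  return t.none("DECLARE my_cur CURSOR WITH HOLD FOR SELECT foo, bar FROM x WHERE time='2016-09-16'::date")
    .then(() => t.sequence(index => {
      // fetch 10 rows at a time; resolving with undefined ends the sequence
      return t.any('FETCH 10 FROM my_cur')
        .then(rows => rows.length ? rows : undefined);
    }, {track: true}))
    .then(pages => t.none('CLOSE my_cur').then(() => pages));
})
  .then(pages => console.log(pages)) // an array with one entry per fetched page
  .catch(err => console.log(err));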
My question is: on the Azure Mobile Services back end, when I run a SQL insert through mssql.query like the one below,
var sql = "INSERT INTO Customers (CustomerName, ContactName) VALUES (?, ?);";
mssql.query(sql, [item.CustomerName, item.ContactName], {
  success: function(results) {
    request.execute();
  },
  error: function(err) {
    console.log("error is: " + err);
  }
});
the data no longer shows up on the Azure portal website. I know I can use the built-in
todoItemTable.insert()
to insert, but sometimes the business logic is so complicated that it can only be done in SQL. Is it the __version field that is causing the problem? If so, what should I put in when I insert?
Thanks!
Check your logs to see what might be going wrong. You do not need to worry about __version or other system columns when inserting a new record.
Is this in a table insert script? If so, you may not want request.execute() in your callback. That would insert the original record in addition to the record inserted in your mssql statement.
You may also have an issue because mssql.query() can call its callback functions multiple times, depending on how many result messages the SQL produces. Define a variable like requestExecuted alongside your sql variable, and in the mssql success callback check it before calling request.execute():
var requestExecuted = false;
mssql.query(sql, [item.CustomerName, item.ContactName], {
  success: function(results) {
    if (requestExecuted === false) {
      requestExecuted = true;
      request.execute();
    }
  },
  error: function(err) {
    console.log("error is: " + err);
  }
});
If this doesn't get you going, try adding console.log statements in the callback to see if it is getting called and how many times. Update your question if you have any more details from errors in your log.
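For instance, a call counter makes repeated invocations obvious (a sketch based on the snippet above):

var callCount = 0;
mssql.query(sql, [item.CustomerName, item.ContactName], {
  success: function(results) {
    callCount++;
    console.log("success callback invocation #" + callCount, results);
  },
  error: function(err) {
    console.log("error is: " + err);
  }
});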
Hi, I'm developing an app with Node.js, Express, and MongoDB. I need to take user data from a CSV file and upload it to my database; the db has a schema designed with Mongoose.
But I don't know how to do this. What is the best approach to read the CSV file, check for duplicates against the db, and insert the user (one column in the CSV) if it is not already there?
Are there modules to do this, or do I need to build it from scratch? I'm pretty new to Node.js and could use some advice here.
Thanks.
The app has an Angular front end so the user can upload the file. Maybe I should read the CSV on the front end, transform it into an array for Node, and then insert it?
Use one of the several Node.js CSV libraries, like this one, and then you can probably just run an upsert on the username.
An upsert is an update query with the upsert flag set to true: {upsert: true}. It will insert a new record only if the search returns zero results. So your query may look something like this:
db.collection.update({username: userName}, newDocumentObj, {upsert: true})
Where userName is the current username you're working with and newDocumentObj is the json document that may need to be inserted.
However, if the query does return a result, it performs an update on those records.
EDIT:
I've decided that an upsert is not appropriate for this but I'm going to leave the description.
You're probably going to need to do two queries here: a find and a conditional insert. For the find query I'd use the toArray() function (instead of a stream) since you are expecting 0 or 1 results. Check whether you got a result for the username and, if not, insert the data.
Read about node's mongodb library here.
EDIT in response to your comment:
It looks like you're reading data from a local csv file, so you should be able to structure your program like:
var MongoClient = require('mongodb').MongoClient;
var csv = require('csv');

function connect(callback) {
  // host/port/schema from command line args, may or may not be needed, hard code if not I guess
  var connStr = 'mongodb://' + host + ':' + port + '/' + schema;
  MongoClient.connect(connStr, function(err, db) {
    if (err) {
      callback(err, null);
    } else {
      // collection name from a command line arg, hard code if not needed
      var colObj = db.collection(collection);
      callback(null, colObj);
    }
  });
}
connect(function(err, colObj) {
  if (err) {
    console.log('Error:', err.stack);
    process.exit(0);
  } else {
    console.log('Connected');
    doWork(colObj, function(err) {
      if (err) {
        console.log(err.stack);
        process.exit(0);
      }
    });
  }
});
function doWork(colObj, callback) {
  // assumes each parsed row is an object with a username field; adjust to however the data is structured
  csv().from('/path/to/file.csv').on('data', function(data) {
    // find by username; insert only when no matching record exists
    colObj.find({username: data.username}).toArray(function(err, docs) {
      if (err) return callback(err);
      if (docs.length === 0) colObj.insert(data, function(err) {
        if (err) callback(err);
      });
    });
  });
}