I'm working on a task that performs inserts/updates on a table.
When I use the full list of the table's columns in the insert/update statements it works,
but whenever I update only the required columns and leave the remaining ones untouched, I get ORA-01036.
I'm calling a generic Lambda function, passing it the query and the parameters.
Success scenario :
{
"stage": "dev",
"params": {
"id": 5956049,
"groupName": "testtoberemoved123",
"externalName": "Axiom_420K_Wheattest547",
"description": "desc 123",
"createdOn": "2018-08-27T22:00:00.000Z",
"createdBy": "EOM",
"updatedOn": "2018-08-28T16:16:41.207Z",
"updatedBy": "EOM",
"status": 1,
"vendorID": null,
"technologyCode": null
},
"query": "update assay_group set NAME=:groupName , EXTERNAL_NAME=:externalName, DESCRIPTION=:description ,CREATED_DATE=to_timestamp_tz( :createdOn, 'yyyy-mm-dd"T"hh24:mi:ss:ff3 TZH:TZM'),CREATED_USER=:createdBy ,LAST_UPDATED_DATE=to_timestamp_tz( :updatedOn, 'yyyy-mm-dd"T"hh24:mi:ss:ff3 TZH:TZM'),LAST_UPDATED_USER=:updatedBy ,GROUP_STATUS=:status,VENDOR_ID=:vendorID,TECHNOLOGY_CODE=:technologyCode where ID=:id",
"enableObjectFormat": true,
"options": {
"autoCommit": true
}
}
This one runs successfully, but as soon as I remove some columns from the statement it fails, as in the scenario below:
{
"stage": "dev",
"params": {
"id": 5956049,
"groupName": "testtoberemoved123",
"externalName": "Axiom_420K_Wheattest547",
"description": "desc 123",
"createdOn": "2018-08-27T22:00:00.000Z",
"createdBy": "EOM",
"updatedOn": "2018-08-28T16:09:36.215Z",
"updatedBy": "EOM",
"status": 3,
"vendorID": null,
"technologyCode": null
},
"query": "update assay_group set NAME=:groupName where ID=:id",
"enableObjectFormat": true,
"options": {
"autoCommit": true
}
}
And this results in the following error:
{"errorMessage":"Error while executing query - ORA-01036: illegal variable name/number\n",
The generic executor is as below:
'use strict';
var oracledb = require("oracledb-for-lambda");
var dbConfig = require('./resources/dbConfig-dev.js');
module.exports.executeQuery= (event, context, callback) => {
var maxSize = parseInt(process.env.maxRows, 10);
// Extract enableJSONParse option
var enableJSONParse = false;
if(event.enableJSONParse != null && event.enableJSONParse != undefined) {
enableJSONParse = event.enableJSONParse;
console.log("enableJSONParse provided in event");
}
console.log("Enable JSON Parse: " + enableJSONParse);
// Extract options
var options = {};
if(event.options != null && event.options != undefined) {
options = event.options;
console.log("options provided in event");
}
// Add maxSize to options
options.maxRows = maxSize;
console.log("Options: " + JSON.stringify(options));
// Set oracledb output format to object
var enableObjectFormat = event.enableObjectFormat;
console.log("Enable Object Format: " + enableObjectFormat);
if(enableObjectFormat) {
console.log("Object Format Enabled");
oracledb.outFormat = oracledb.OBJECT;
} else {
oracledb.outFormat = oracledb.ARRAY;
}
console.log("oracledb.outFormat: " + oracledb.outFormat);
var currentStage = event.stage;
console.log("Current Stage: " + currentStage);
if (currentStage != null && currentStage != 'undefined') {
var configFileName = './resources/dbConfig-' + currentStage + '.js'
try{
dbConfig = require(configFileName);
} catch (error) {
callback(new InternalServerError("No dbConfig found - " + error.message));
return;
}
}
console.log("Using dbConfig: " + JSON.stringify(dbConfig));
var response = "";
var parameters = event.params;
var query = event.query;
if(query == null || query == undefined || query == "") { // Empty Query - throw error
console.log("Missing required field - query")
callback(new MissingRequiredFieldError("Missing Required Field - query"));
return;
}
if(parameters == null || parameters == undefined) { // parameters not provided in event - set to empty list
console.log("No parameters defined");
parameters = [];
}
console.log("Query: " + query);
console.log("Query Parameters: " + parameters);
oracledb.getConnection(
{
user : dbConfig.user,
password : dbConfig.password,
connectString :dbConfig.connectString
},
function(err, connection) {
if (err) {
console.error("Connection Error: " + err.message);
callback(new InternalServerError("Error while connecting to database - "+ err.message));
return;
}
// return all CLOBs as Strings
oracledb.fetchAsString = [ oracledb.CLOB ];
connection.execute(
// The statement to execute
query,
parameters, // Query Param
options, // Options
// The callback function handles the SQL execution results
function(err, result) {
if (err) {
console.error("Execution Error Messages = " + err.message);
doRelease(connection);
callback(new InternalServerError("Error while executing query - "+ err.message));
return;
}
console.log("Query " + query + " Executed Successfully");
var resultSet;
// In case query is SELECT
if(result.rows != null && result.rows != undefined) {
console.log("Returned rows: " + result.rows.length);
console.log("Result.metaData: " + JSON.stringify(result.metaData));
console.log("Result.rows: " + JSON.stringify(result.rows));
resultSet = result.rows;
try {
if(result.rows.length != undefined && result.rows.length == 0) {
resultSet = [];
} else if(enableJSONParse) {
if(result.rows[0][0].type == oracledb.CLOB) {
console.log("rows.type is CLOB");
resultSet = JSON.parse(result.rows[0][0]);
}
resultSet = JSON.parse(result.rows);
}
} catch(error) {
callback(new InternalServerError("Error while parsing result of query: "+error.message));
return;
}
} else { // In case query is INSERT/UPDATE/DELETE
console.log("Result.rowsAffected: " + result.rowsAffected);
if(result.rowsAffected > 0) {
resultSet = 'Executed Succesfully - Rows Affected: '+ result.rowsAffected;
} else {
resultSet = 'No rows affected';
}
}
doRelease(connection);
callback(null, resultSet);
});
});
// Note: connections should always be released when not needed
function doRelease(connection) {
connection.close(
function(err) {
if (err) {
console.error(err.message);
callback(new InternalServerError(err.message));
return;
}
});
}
};
The problem is that you are asking Oracle to set values for bind parameters that don't exist.
Let's consider your statement update assay_group set NAME=:groupName where ID=:id. Oracle will parse this and then run through your bind parameters. It will set values for groupName and id fine, and then it will get to the parameter named externalName. However, there is no bind parameter :externalName in your statement.
What's Oracle supposed to do with the value you've given to this non-existent parameter? You seem to be expecting Oracle to just ignore it. However, ignoring it isn't a good option: if, for example, someone mistypes a parameter name, it is better to raise an error straight away rather than wait until all the other parameters have been set and then complain that one of them was missing.
You will have to pass to your executeQuery function the parameters that are used by the query or statement being executed and no others.
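One way to do that on the calling side, as a minimal sketch (the helper name and regex here are mine, not part of the Lambda): keep only the entries of params whose bind names actually appear in the SQL before calling connection.execute:
function filterBindParams(query, params) {
    // Collect the :name placeholders that appear in the statement.
    var bindNames = query.match(/:[a-zA-Z_][a-zA-Z0-9_]*/g) || [];
    var filtered = {};
    bindNames.forEach(function(match) {
        var name = match.substring(1); // strip the leading ':'
        if (params.hasOwnProperty(name)) {
            filtered[name] = params[name];
        }
    });
    return filtered;
}
// filterBindParams("update assay_group set NAME=:groupName where ID=:id", event.params)
// returns { groupName: "testtoberemoved123", id: 5956049 }
// Note: the naive regex also matches :mi, :ss, :ff3 inside the quoted date format masks,
// but those matches are harmless here because params has no keys with those names.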
Related
I have a bot built with the Microsoft Bot Framework that I want to be able to pull data from an Azure SQL database in order to answer questions asked of the bot. I have set up the database and it has some Excel files loaded into it.
Here is my code right now:
var Connection = require('tedious').Connection;
var Request = require('tedious').Request;
var connection = new Connection(dataconfig);
connection.on('connect', function(err) {
console.log("Connected");
executeStatement();
});
var Request = require('tedious').Request;
var TYPES = require('tedious').TYPES;
function executeStatement() {
request = new Request("select \"Product Name\" from SPA_Data_Feeds where \"Strategic Priority\" = 'Accelerate to Value (LD)'",
function(err, rowCount, rows)
{
console.log(rowCount + ' row(s) returned');
}
);
var result = "";
var count = 0
request.on('row', function(columns) {
columns.forEach(function(column) {
console.log("%s\t", column.value);
result+= column.value + "\t\n";
count++;
if ( count == rowCount ) {
ATVData(result);
} ;
});
});
connection.execSql(request);
}
function ATVData(result) { //Puts "result" inside of an adaptive card }
I can't seem to figure out how to get the if statement right. rowCount does not work because the code does not wait for it to be defined by the callback first, and I have tried things like column(s).length and result(s).length, but none of them work.
Is there something else I could use that would complete the if statement? Or do I need to reformat somehow with callbacks/promises to get it to wait for rowCount to be defined? If so could I get some advice on that?
We can use Q.js, one of the JavaScript Promise implementations, to solve this issue. For example:
var Connection = require('tedious').Connection;
var Request = require('tedious').Request;
var q = require('q');
// Create connection to database
var config =
{
userName: '', // update me
password: '', // update me
server: '', // update me
options:
{
database: '' //update me
, encrypt: true
}
}
var connection = new Connection(config);
// Attempt to connect and execute queries if connection goes through
connection.on('connect', function(err)
{
if (err)
{
console.log(err)
}
else
{
queryDatabase().then(function(result){
ATVData(result);
}, function(err){
console.log(err);
});
}
}
);
function queryDatabase()
{
console.log('Reading rows from the Table...');
//create a promise
var deferred = q.defer();
// Read all rows from table
var result = [];
var request = new Request(
"SELECT * From ForumMessages",
function(err, rowCount)
{
deferred.resolve(result);
});
request.on('row', function(columns) {
columns.forEach(function(column) {
console.log("%s\t%s", column.metadata.colName, column.value);
result.push(columns);
});
});
connection.execSql(request);
//return the promise
return deferred.promise;
}
function ATVData(result){
//your bot code goes here
}
To expand on Grace's answer, for each row you can also do the following, which adds some utility:
request.on('row', function(columns) {
var singleResult = {};
columns.forEach(function(column) {
console.log("%s\t%s", column.metadata.colName, column.value);
// Add a property to the singleResult object.
singleResult[column.metadata.colName] = column.value;
});
// Push the completed singleResult object to the array (once per row, not once per column).
result.push(singleResult);
});
Then, in your bot's code, you can access each value by property name in dot notation, for example result[x].colName, where colName is the name of the column (or object property in this case).
Example (assuming at least one result item from the database, with a "link" column that has data):
var adaptiveCardExample = {
'contentType': 'application/vnd.microsoft.card.adaptive',
'content': {
'$schema': 'http://adaptivecards.io/schemas/adaptive-card.json',
'type': 'AdaptiveCard',
'version': '1.0',
'body': [
{
"type": "TextBlock",
"text": "Code Example"
},
{
"type": "TextBlock",
"text": "We're going to " + result[0].link,
"wrap": true
}],
'actions': [
{
'type': 'Action.OpenUrl',
'title': 'Go to the example link',
'url': result[0].link
}
]
}
};
var adaptiveCardMsg = new builder.Message(session).addAttachment(adaptiveCardExample);
session.send(adaptiveCardMsg);
You may want to add a check for null or undefined for the property in the case it is a nullable field in the database, as a precaution.
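For example, a minimal guard (assuming the same result array as above) might look like:
// Fall back to a placeholder when the column is null/undefined in the database.
var link = (result.length > 0 && result[0].link != null) ? result[0].link : 'about:blank';
You can then use link in the card body and action instead of result[0].link directly.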
I have an issue with my application, which uses this model to save protocol records with nedb: Protocol (title, date, patient_name, notes).
Sometimes, nearly 40% of the time, it does not save my record (insert or update) into the db... and no error is raised...
This is my save function :
document.getElementById('save').addEventListener('click', () => {
title = document.getElementById('title').value.trim();
date = document.getElementById('date_picker').value.trim();
date = (date == null) ? "" : date;
duration = document.getElementById('duration').value.trim();
cost = document.getElementById('cost').value.trim();
name = document.getElementById('name').value.trim();
name = (name == null) ? "" : name.trim();
nHistory = document.getElementById('nHistory').value.trim();
nHistory = (nHistory == null) ? "" : nHistory;
therapy = document.getElementById('therapy').value.trim();
therapy = (therapy == null) ? "" : therapy;
sound = document.getElementById('sound').value.trim();
sound = (sound == null) ? "" : sound;
if (title == "" || date == "" || title == "" || name == "" || nHistory == "" || therapy == "" || sound == "" ){
dialog.showMessageBox({
title :appName,
type: 'warning',
message : 'Please enter a value in all fields'
});
return;
} else {
selectedId = document.getElementById('selectedId').value;
selectedId = (selectedId == null) ? "" : selectedId.trim();
// create a protocol object
var protocol = {
"title": title,
"date": date,
"duration": duration,
"cost": cost,
"patient_name": name,
"nHistory": nHistory,
"therapy": therapy,
"sound": sound
}
if(selectedId == ""){ // insert query
database.addProtocol(protocol)
} else { // update query
database.updateProtocol({_id:selectedId}, protocol)
} // end if
// we redirect to the protocol list
var url = require('url');
var path = require('path');
var remote = require('electron').remote;
remote.getCurrentWindow().loadURL(url.format({
pathname: path.join(__dirname, '../pages/protocolList.html'),
protocol: 'file:',
slashes: true
}));
// We reset the selected protocol id
document.forms['form-edit'].selectedId = null;
sharedObject.selectedProtocolId = "";
} // end if
});
And this is how I perform saves into the db:
var DataStore = require('nedb');
db = {};
db.users = new DataStore({filename: 'rsc/db/users.db', autoload: true });
db.protocols = new DataStore({filename: 'rsc/db/protocols.db'});
// Get a single protocol
exports.getProtocol = function(query, fnc){
db.protocols.loadDatabase()
db.protocols.findOne(query, function (err, doc) {
if(err){
console.log("An error occured with the query : ", err); return;
} else {
// Execute the parameter function
fnc(doc);
}
});
}
// Returns the query protocols
exports.findProtocols = function(queryParams, fnc){
// Get the query protocols
db.protocols.loadDatabase();
db.protocols.find(queryParams, function (err, docs) {
if(err){
console.log("An error occured with the query : ", err); return;
} else {
//sort protocols by date
comparator = function(protocol1, protocol2) {
return new Date(protocol2.date).getTime() - new Date(protocol1.date).getTime();
}
docs = docs.sort(comparator);
// Execute the parameter function
fnc(docs);
}
});
};
// Adds a protocol
exports.addProtocol = function(protocol) {
// save the new protocol in the database
db.protocols.loadDatabase();
db.protocols.insert(protocol, function(err, newProtocol){
if(err) {
console.log("An error occured with the insert query", err); return;
} else {
console.log("protocol added...");
}
});
};
// Updates a protocol
exports.updateProtocol = function(where, protocol) {
// update the new protocol in the database
db.protocols.loadDatabase();
db.protocols.update(where, {$set : protocol}, {}, function(err){
if(err) {
console.log("An error occured with the update query", err); return;
} else {
console.log("protocol updated...");
}
});
};
//Deletes a protocol
exports.deleteProtocol = function(queryParam, fnc){
db.protocols.loadDatabase();
db.protocols.remove(queryParam, {}, function(err, numRemoved){
if(err) {
console.log("An error occured with the delete query", err); return;
} else {
console.log("protocol deleted...");
}
fnc();
});
}
Any idea will be welcome, thanks!
There was an upgrade of the nedb library between the time I implemented this and the time I got the error (the beginning of the year).
After investigation, I just set all my DataStore instances to autoload, like this one:
db.users = new DataStore({filename: 'rsc/db/users.db', autoload: true });
And the problem was solved.
This is how I first wanted to implement it, but at the time it did not work as expected. So it appears to have been a bug in the library that has since been fixed.
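Applied to the code in the question, that just means creating the protocols datastore with the autoload flag as well (a sketch; with autoload the explicit db.protocols.loadDatabase() calls become unnecessary):
db.users = new DataStore({filename: 'rsc/db/users.db', autoload: true });
db.protocols = new DataStore({filename: 'rsc/db/protocols.db', autoload: true });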
I was writing some nested code and tried using async.waterfall or async.series, but I am getting random results every time I refresh. It seems to be because the queries of the first 2 functions finish in random order.
The first query sorts by committed_date DESC, but when I add the 2 sub-queries the sort gets distorted.
Step1: Loop landingpages
Step1.1 - fetch details1 repositories
Step1.2 - fetch details2 versions
Step2: build array
db.collection('landingpages').find({is_deleted:{$ne:1}}).sort({committed_date:-1}).limit(10).toArray(function(err, db_results) {
var data_array = [];
var x=1;
if(db_results.length == 0) {
return_data.lps = data_array;
parallel_done();
}else{
async.each(db_results, function (db_results1, cb) {
async.waterfall(
[
function(callback) {
//if this is removed or passed as callback(null, ""); the sort is fixed from committed - 1
var data_repo = {};
db.collection('repositories').find({repository_id: repository_id}).toArray(function(err, db_results1) {
if(db_results1.length == 0){
var data_repo = {};
callback(null, data_repo);
}else{
var data_repo = db_results1[0];
callback(null, data_repo);
}
});
},
function(callback) {
//if this is removed or passed as callback(null, ""); the sort is fixed from committed - 1
var data_version = {};
db.collection('versions').find({landingpage_id: landingpage_id}).sort({_id:-1}).limit(1).toArray(function(err, db_results1) {
if(db_results1.length == 0){
var data_version = {};
callback(null, data_version);
}else{
var data_version = db_results1[0];
callback(null, data_version);
}
});
}
],
function (err, data_repo,data_version) {
var document = {
"x": x++,
"landingpage_id": db_results1.landingpage_id,
"repository_id": db_results1.repository_id,
"version_id": data_version.version_id,
"val": db_results1,
"data_repo": data_repo,
"data_version": data_version,
};
data_array.push(document);
if(data_array.length == db_results.length) {
return_data.lps = data_array;
}
}
);
});
}
});
This code doesn't work and I couldn't find out why.
It always pushes obj as a correctly serialized JSON string, but it always returns with the wrong key. In obj the id increases regularly, but the key doesn't.
var c = redis.createClient(),
obj = {id:0, name:"dudu"},
key="person:";
c.select(0);
c.multi()
.incr("idx:person", function(err, _idx) {
console.log("incr -> #idx: " + _idx);
key += obj.id = _idx;
console.log("After Inc obj: " + JSON.stringify(obj));
})
.set(key, JSON.stringify(obj), function(err, _setResp) {
console.log("set -> #_setResp: " + _setResp);
console.log(JSON.stringify(ihale));
})
.get(key, function(er, _obj) {
console.log("get -> " + key);
if (er) {
res.json(er);
} else {
console.log("Found: " + JSON.stringify(_obj));
res.json(_obj);
}
})
.exec(function(err, replies) {
console.log("MULTI got " + replies.length + " replies");
replies.forEach(function(reply, index) {
console.log("Reply " + index + ": " + reply.toString());
});
});
c.quit();
This worked:
c.INCR("idx:person", function(a,b) {
obj.id = b;
console.dir(obj);
key = "pa:" + b;
c.set(key, JSON.stringify(obj), function(err, _setResp) {
console.log("set -> #_setResp: " + _setResp);
console.log(JSON.stringify(obj));
c.get(key, function(er, _obj) {
console.log("get -> " + key);
if (er) {
res.json(er);
} else {
console.log("Found: " + JSON.stringify(_obj));
res.json(_obj);
}
});
});
});
The way to do this is simple :)
Event-driven Node executes each part inside the previous one's callback:
c.INCR("idx:person", function(a,b) {
obj.id = b;
key = "pa:" + b;
c.set(key, JSON.stringify(obj), function(err, _setResp) {
c.get(key, function(er, _obj) {
if (er) {
res.json(er);
} else {
res.json(_obj);
}
});
});
});
In transaction mode, the commands are grouped and passed to Redis together. The EXEC command then executes the commands you queued. So when you pass the key value to the SET command, the incremented key value is not yet available on its right-hand side.
For this kind of use, if you still want to merge the commands into one, script it in Lua:
local keyid = redis.call('INCR', 'idx:person')
local result = redis.call('SET', 'person:'..keyid,ARGV[1])
return 'person:'..keyid
To use it in a Redis EVAL command:
eval "local keyid = redis.call('INCR', 'idx:person'); local result = redis.call('SET', 'person:'..keyid,ARGV[1]);return 'person:'..keyid" 0 "yourJSONObject"
this should work:
client.eval([ "local keyid = redis.call('INCR', 'idx:person'); local result = redis.call('SET', 'person:'..keyid,ARGV[1]);return result", 0,JSON.stringify(obj) ], function (err, res) {
console.log(res); // give the personID
});
You can also use a hash instead of a simple key in your example, with separate fields for the id, the name, and the JSON object. Returning the hash from the Lua script will be like returning it from an HSET.
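For illustration, here is a sketch of the same flow using a hash with plain node_redis calls (the field names are mine):
c.incr("idx:person", function(err, id) {
    var key = "person:" + id;
    obj.id = id;
    // Store the id, the name and the serialized object as separate hash fields.
    c.hmset(key, "id", id, "name", obj.name, "json", JSON.stringify(obj), function(err) {
        c.hgetall(key, function(err, person) {
            console.log(person); // e.g. { id: '1', name: 'dudu', json: '{"id":1,"name":"dudu"}' }
        });
    });
});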
I have looked through the bluebird README examples, and I am still wondering how to convert some async code to use promises with .then.
There are a number of if statements in here, though the main point is that, while looping through toArray, if the element exists in the database (findOne), assign it to a variable so it can be .pushed into a field of an embedded doc on the new (.post & .save) db doc.
Here's the current async code, which consequently runs the findOne after .save, although it needs to run before it:
// create a story (accessed at POST http://localhost:4200/api/v1/story)
.post(function(req, res) {
console.log('posting a new Story..from: ' + res.locals._id + '..' + res.locals.username );
var story = new Models.Story();
var toArray = req.body.to;
console.log(toArray); // [ 'user1', 'user2', 'user3' ]
toArray.forEach(toArrayLoop);
function toArrayLoop(element, index, array){
console.log('element: ' + element); // 'user1' .. 'user2' .. 'user3'
var out = false; // if sent to Self, out = true
if (element == res.locals.username) {out = true; console.log('to element: ' + element + ' == res.locals.username: ' + res.locals.username)}
var toUserId = '';
if (element) {
Models.User.findOne({username: element}, function (err, user) {
if (user) {
if (err) {
console.log(err);
res.send(err);
}
console.log('user._id = ' + user._id);
toUserId = user._id;
} else {
toUserId = '';
console.log('toUserId = ' + toUserId);
}
});
}
story.to.push({
user : toUserId, // push the findOne user._id
username : element, // push the toArray element
view :
{
inbox: true,
outbox: out,
archive: false,
},
updated : req.body.nowDatetime
});
}
var archive = false;
console.log('req.body.archive = ' + req.body.archive);
if (req.body.archive == 'true') { archive = true; console.log('archive = ' + archive); };
var in = false;
toArray.forEach(fromSelfLoop);
function fromSelfLoop(element, index, array){
console.log('checking if sent to Self: ' + element); // 'user1' .. if matches res.locals: (sent from/to Self)
if (element == res.locals.username) {in = true; console.log('from element: ' + element + ' == res.locals.username: ' + res.locals.username)}
} // if sent to Self, archive = true
story.from.push({
user : res.locals._id,
username : res.locals.username,
view :
{
inbox: in,
outbox: true,
archive: archive,
},
updated : req.body.nowDatetime
});
story.title = req.body.title;
// ..even more doc val assignments..
console.log('To: ' + req.body.to);
console.log('Story: ' + req.body.title);
story.save(function(err, result) {
if (err) {
console.log(err);
res.send(err);
}
console.log("The result: ", result);
res.json({ message: 'Story "' + story.title + '" Created' });
});
console.log('post success!');
})
You're way overkilling it, in my opinion; promises provide ways to synchronize this out of the box, seamlessly.
You can use promise aggregation methods (in this case .join and .props) to map directly to the properties and get the values.
Assuming you promisified Mongoose (so Bluebird promises rather than Mongoose ones).
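For reference, the promisification assumed here usually looks something like this (a sketch; promisifyAll adds the ...Async variants such as findOneAsync, and the exact targets may depend on your Mongoose version):
var Promise = require('bluebird');
var mongoose = require('mongoose');
// Add promise-returning ...Async copies of the callback-style methods.
Promise.promisifyAll(mongoose.Model);
Promise.promisifyAll(mongoose.Model.prototype);
Promise.promisifyAll(mongoose.Query.prototype);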
var story = new Models.Story();
var toArray = req.body.to; // [ 'user1', 'user2', 'user3' ]
var to = Promise.map(toArray,function(element){
return Promise.props({ // resolves all properties
user : Models.User.findOneAsync({username: element}),
username : element, // push the toArray element
view : {
inbox: true,
outbox: element == res.locals.user.username,
archive: false
},
updated : req.body.nowDatetime
});
});
var from = Promise.map(toArray,function(element){ // can be a normal map
return Promise.props({
user : res.locals._id,
username : res.locals.username,
view : {
inbox: element == res.locals.user.username,
outbox: true,
archive: archive,
},
updated : req.body.nowDatetime
});
});
Promise.join(to, from, function(to, from){
story.to = to;
story.from = from;
story.title = req.body.title;
return story.save();
}).then(function(){
console.log("Success! Story saved!");
}).catch(Promise.OperationalError, function(e){
// handle error in Mongoose save findOne etc, res.send(...)
}).catch(function(e){
// handle other exceptions here, this is most likely
// a 500 error where the top one is a 4XX, but pay close
// attention to how you handle errors here
});