I'm trying to update a table entity during data insertion using a server-side Node script. Here is what I'm trying to do: I have an Address table which has a column named geolocation of type "geography". When a user updates the Address, I'm using node-geocoder to get the latitude and longitude to update the geolocation column.
Here is the code snippet.
// Azure Mobile Apps table definition for the Address table.
// dynamicSchema lets columns be created on the fly during development.
var table = module.exports = require('azure-mobile-apps').table();
table.dynamicSchema = true;
// Insert middleware: geocode the postal address and store it as a
// WKT POINT string in the geolocation column before the row is written.
table.insert(function (context) {
// Build one free-form address string from the item's address fields.
var address = context.item.lines1 + ' ' + context.item.lines2 + ' ' + context.item.city + ' ' + context.item.state + ' ' + context.item.zip;
var geocoderProvider = 'google';
var httpAdapter = 'https';
var extra = {
apiKey: '',
formatter: null
};
var geocoder = require('node-geocoder')(geocoderProvider, httpAdapter, extra);
// NOTE(review): this promise is never awaited, so the assignment to
// context.item.geolocation below happens AFTER context.execute() has
// already run -- the row is inserted without the geolocation value.
geocoder.geocode(address)
.then(function(res) {
// WKT convention is "POINT(longitude latitude)".
var geolocation = "POINT("+ res[0].longitude + " " + res[0].latitude +")";
console.log("Value of Geolocation is ", geolocation);
context.item.geolocation = geolocation;
})
.catch(function(err) {
console.log("Error ", err);
});
// BUG: executes the insert immediately, racing the geocode() promise above.
return context.execute();
});
However, I don't see the table being updated with the geolocation. Any pointers?
I looked at a few samples available online, but they are mostly based on the previous Mobile Services, where the insert method signature is different, for example:
// Legacy Azure Mobile Services insert script (old three-argument
// signature: insert(item, user, request)); shown only for comparison.
function insert(item, user, request) {
// T-SQL INSERT; the 3rd/4th '?' parameters are concatenated (in SQL)
// into the WKT text passed to geography::STPointFromText, SRID 4326.
var queryString = "INSERT INTO Place (title, description, location) VALUES (?, ?, geography::STPointFromText('POINT(' + ? + ' '
+ ? + ')', 4326))";
mssql.query(queryString, [item.title, item.description, item.longitude.toString(), item.latitude.toString()], {
success: function() {
// Respond 200 with an empty body once the row is written.
request.respond(statusCodes.OK, {});
}
});
}
context.execute() is being called before the geocode() promise is resolved. Move context.execute() inside the callback and return the promise from the function...
// Insert middleware: resolve the geocode first, then run the insert.
// Returning the promise chain makes the server wait for it, so the
// geolocation value is on context.item before context.execute() fires.
table.insert(function (context) {
// ... (address / provider / adapter setup elided, as in the question)
var geocoding = require('node-geocoder')(geocoderProvider, httpAdapter, extra).geocode(address);
return geocoding
.then(function (results) {
// WKT convention is "POINT(longitude latitude)".
var point = "POINT("+ results[0].longitude + " " + results[0].latitude +")";
console.log("Value of Geolocation is ", point);
context.item.geolocation = point;
// Only now perform the actual insert.
return context.execute();
})
.catch(function (err) {
console.log("Error ", err);
});
});
In my experience, you can use the update operation instead of the insert operation to update an existing object in the table. And if the table already has a column named geolocation, enabling the dynamicSchema property seems unnecessary.
Meanwhile, you can check the table access property for the update operation; please see the sections "How to: Require authentication for access to tables" & "How to: Disable access to specific table operations" of the doc https://azure.microsoft.com/en-us/documentation/articles/app-service-mobile-node-backend-how-to-use-server-sdk/.
Related
I am trying to get the table creation date and time in Google Spanner but I don't see any option for it. There is not even any such information available in INFORMATION_SCHEMA.
In Spanner, you cannot get a create_time for a table, and I believe that's due to how a Spanner table is designed and distributed across multiple regions, so a reliable create_time cannot be provided. A workaround would be: when you create a table, record that time and save it to another table. This way you will have a register of all your table creation times. Code for this would be:
// Imports the Google Cloud client library
// (the package name starts with '@', not '#').
const {Spanner} = require('@google-cloud/spanner');
// Creates a client
const projectId = "PROJECTID"
const instanceId = "INSTANCEID";
const databaseId = "DATABASEID";
const spanner = new Spanner({projectId});
// Gets a reference to a Cloud Spanner instance and database
const instance = spanner.instance(instanceId);
const database = instance.database(databaseId);
// DDL for the TablesMeta table that will hold your tables' creation times.
const tableMetaschema = [
'CREATE TABLE TablesMeta (' +
' tableId INT64 NOT NULL,' +
' TableName STRING(1024),'+
' TableCreateTime DATE' +
') PRIMARY KEY(tableId)'
];
// DDL for the new table whose creation time we want to record.
// (No trailing comma after the last column -- Spanner DDL rejects it.)
const singerSchema = [
'CREATE TABLE Singers (' +
' SingerId INT64 NOT NULL,' +
' FirstName STRING(1024),' +
' LastName STRING(1024),' +
' SingerInfo BYTES(MAX)' +
') PRIMARY KEY(SingerId)'
];
// BUG FIX: the original called database.updateSchema(TablesMeta) with an
// undefined identifier (the variable is tableMetaschema) and never waited
// for that schema change before creating Singers. Chain both updates so
// TablesMeta exists before we insert into it.
database.updateSchema(tableMetaschema)
.then(() => database.updateSchema(singerSchema))
.then(() => {
database.runTransaction(async (err, transaction) => {
if (err) {
console.error(err);
return;
}
try {
// Spanner query parameters are written @name (the original used
// '#', which is not valid parameter syntax).
const [rowCount] = await transaction.runUpdate({
sql:
'INSERT TablesMeta (tableId, TableName, TableCreateTime) VALUES (1, @TableName, @TableCreateTime)',
params: {
TableName: 'Singers',
TableCreateTime: Spanner.date(),
},
});
console.log(
`Successfully inserted ${rowCount} record into the TablesMeta table.`
);
await transaction.commit();
} catch (err) {
console.error('ERROR:', err);
} finally {
// Close the database when finished.
database.close();
}
});
})
.catch(console.error);
I have fetched data from the Google Trends API and now I am trying to insert it into the database, but only one row is being inserted at a time, and I want to insert all the rows.
try {
// Fetch interest-by-region data for the keyword in one US state.
var res = await googleTrends.interestByRegion({keyword:keyword,geo:"US-"+state}); //resolution:keyword
res = JSON.parse(res);
// for debugging.
//console.log(res);
if("default" in res){
if("geoMapData" in res["default"]){
for(var x in res["default"].geoMapData){
var row = res["default"].geoMapData[x];
if("geoName" in row && "value" in row){
// console.log(" - " + row.geoName + "\t\t" + row.value);
console.log(">>>>>>>>>>>>>>>>>>",x,row.geoName + row.value)
//insert statement
// BUG(review): only ONE value is bound here, but the INSERT below
// names TWO target columns (state, keyword) -- exactly what produces
// "INSERT has more target columns than expressions". It should be
// VALUES($1,$2) with params [row.geoName, keyword].
var name1 = [row.geoName]
// name1.push([keyword,row.geoName])
// var name2 = [row.geoName]
pool.query("INSERT INTO practice(state,keyword) VALUES($1)",name1,(err, res) => {
console.log(err, res);
// pool.end(() => {
// console.log('pool has ended')
//})
});
I am getting this error when running it:
error: INSERT has more target columns than expressions
at Connection.parseE (/home/abc/Desktop/node2/node_modules/pg/lib/connection.js:604:13)
at Connection.parseMessage (/home/abc/Desktop/node2/node_modules/pg/lib/connection.js:403:19)
at Socket.<anonymous> (/home/abc/Desktop/node2/node_modules/pg/lib/connection.js:123:22
I have an issue with not able to get the affected rows result from the following
During the debug I notice it always crashes at conn.querySync(query.sqlUpdate, params);
Console.log is not showing anything as well.
What did I do wrong here?
CODE
//imports
const format = require('string-format');
const query = require('../db/query');
const message = require('../common/message');
const constant = require('../common/constant');
var ibmdb = require("ibm_db");
require('dotenv').config();
// access the environment variables for this environment
// NOTE(review): PWD and UID are also standard shell variables (working
// directory / user id), so process.env.PWD may not contain the value
// from .env on some platforms -- consider renaming them; verify.
const database = "DATABASE=" + process.env.DATABASE + ";";
const hostname = "HOSTNAME=" + process.env.HOSTNAME + ";";
const uid = "UID=" + process.env.UID + ";";
const pwd = "PWD=" + process.env.PWD + ";";
const dbport = "PORT=" + process.env.DBPORT + ";";
const protocol = "PROTOCOL=" + process.env.PROTOCOL;
// DB2 connection string assembled from the individual key=value pairs.
const connString = database+hostname+uid+pwd+dbport+protocol;
// Runs the UPDATE in query.sqlUpdate synchronously.
// NOTE(review): querySync does not report an affected-row count (see the
// follow-up below); prepare + executeNonQuery is needed for that.
function updateContact(params) {
ibmdb.open(connString, function(err, conn){
// NOTE(review): `err` from open() is never checked before using conn.
//blocks until the query is completed and all data has been acquired
var rows = conn.querySync(query.sqlUpdate, params);
console.log(rows);
});
}
module.exports.updateContact = updateContact;
I finally understand what the problem is.
The problem lies in my use of the querySync function. This function does not return affected row counts.
https://github.com/ibmdb/node-ibm_db/blob/master/APIDocumentation.md#querySyncApi
The proper way is to use prepare followed by executeNonQuery.
https://github.com/ibmdb/node-ibm_db/blob/master/APIDocumentation.md#executeNonQueryApi
So, following the API docs, I modified my code.
...
// Prepare the UPDATE once, then run it with executeNonQuery, which
// (unlike querySync) reports the number of affected rows.
// BUG FIX: the original callback parameter was named `error` while the
// body checked `err`, which is unbound here and would throw a
// ReferenceError the moment the callback ran.
conn.prepare(query.SQL_UPDATE, function (err, stmt) {
if (err) {
// Preparation failed: log it and release the connection.
console.log(err);
return conn.closeSync();
}
stmt.executeNonQuery(params, function (err, result) {
if( err ) {
console.log(err);
}
else {
// For non-query statements, result is the affected-row count.
console.log("Affected rows = " + result);
}
//Close the connection
conn.close();
});
});
...
I think I'm very close to what I want to do. I have the following API GET method in Node.js that retrieves a file varbinary(MAX) from a SQL Server database. It was converted from a base64-encoded string before being inserted, so the Content-Type information was stripped from the string.
node.js
// GET /getFile -- stream the stored file (Chart varbinary column) back
// to the caller with its original name and MIME type as headers.
router.get('/getFile', (req, res) => {
console.log("Calling getFile for file " + req.query.serialNumber + ".")
var serialNumber = req.query.serialNumber;
let request = new sql.Request(conn);
// SECURITY FIX: bind serialNumber as a query parameter instead of
// concatenating it into the SQL text -- req.query is untrusted input
// (SQL injection).
request.input('serialNumber', serialNumber);
request.query('SELECT FileName + \'.\' + FileExtension AS \'File\', FileType, ContentType, SerialNumber, Chart ' +
'FROM dbo.ChangeFiles ' +
'WHERE SerialNumber = @serialNumber')
.then(function (recordset) {
log("Successfully retrieved file " + recordset[0].SerialNumber + " from database.");
log("Length of blob " + recordset[0].File + " is " + recordset[0].Chart.length)
res.status(200);
res.setHeader('Content-Type', recordset[0].ContentType);
res.setHeader('Content-Disposition', 'attachment;filename=' + recordset[0].File);
res.end(Buffer.from((recordset[0].Chart)));
}).catch(function (err) {
log(err);
res.status(500).send("Issue querying database!");
});
});
That works fine, but what to do in Angular to retrieve it and prompt for a download for the user has not been clear for me, nor has there been a lot as far as help/resources online. Here is what I have so far in my service class.
fileDownload.service.ts
// Requests the file for serialNumber from the API and maps the raw
// response through extractFile.
// NOTE(review): no responseType is set on the GET, so the body is
// treated as text/JSON rather than binary -- likely why the download
// comes out as an unknown type; verify against the Http client used.
downloadFile(serialNumber: string): Observable<any> {
return this.http.get(this.baseURL + '/getFile', { params: { serialNumber: serialNumber } })
.map(this.extractFile);
}
// Wraps the response body in a Blob and hands it to FileSaver.
// NOTE(review): `response.blob` looks like a method reference, not the
// binary body -- it should presumably be response.blob(); as written the
// Blob is built from the function itself, which would explain the
// unknown-type download. Confirm against the Response API in use.
private extractFile(response: Response) {
const file = new Blob([response.blob]);
FileSaver.saveAs(file);
// const url = window.URL.createObjectURL(file);
// window.open(url);
return file;
}
As you can see I've tried a couple of approaches. The commented out portion of the extractFile method didn't work at all, and using the FileSaver.saveAs function produces a file download of an unknown type, so the headers sent from node.js didn't seem to affect the file itself.
Would someone be able to advise how to proceed in Angular with what is successfully being sent from node.js so that I can successfully download the file, regardless of type?
Thanks so much in advance.
I got it working after all. I had to rework the API call so that it sent all of the file information separately, so that the MIME type and file name could be assigned to the file on the client side in the component class. For some reason, when I tried to do it all in the API it wouldn't work, so this was my workaround. Here is what works for me.
node.js api
// GET /getFile -- return the whole ChangeFiles record (including the
// Chart varbinary as a serialized Buffer) so the client can rebuild the
// file with its name and MIME type.
router.get('/getFile', (req, res) => {
console.log("Calling getFile for file " + req.query.serialNumber + ".")
var serialNumber = req.query.serialNumber;
let request = new sql.Request(conn);
// SECURITY FIX: bind serialNumber as a query parameter instead of
// concatenating it into the SQL text -- req.query is untrusted input
// (SQL injection).
request.input('serialNumber', serialNumber);
request.query('SELECT FileName + \'.\' + FileExtension AS \'File\', FileType, ContentType, SerialNumber, Chart ' +
'FROM dbo.ChangeFiles ' +
'WHERE SerialNumber = @serialNumber')
.then(function (recordset) {
log("Successfully retrieved file " + recordset[0].SerialNumber + " from database.");
log("Length of blob " + recordset[0].File + " is " + recordset[0].Chart.length)
res.send(recordset[0]);
}).catch(function (err) {
log(err);
res.status(500).send("Issue querying database!");
});
});
component class
// Fetch the file record, rebuild the binary payload on the client, and
// trigger a browser download with the original name and MIME type.
downloadFile(serialNumber: string): void {
this.changeService.downloadFile(serialNumber).subscribe((res: any) => {
// res.Chart.data arrives as a plain number[] (a serialized Node
// Buffer); copy it into a typed array to recover the raw bytes.
const bytes = Uint8Array.from(res.Chart.data as number[]);
// Wrap the bytes in a Blob carrying the stored ContentType, then let
// FileSaver save it under the stored file name.
const file = new Blob([bytes.buffer], { type: res.ContentType });
FileSaver.saveAs(file, res.File);
console.log(res);
});
}
service class
// Fetch the stored file record for serialNumber from the API and map the
// raw HTTP response to its JSON body via extractFile.
downloadFile(serialNumber: string): Observable<any> {
const options = { params: { serialNumber } };
return this.http.get(this.baseURL + '/getFile', options).map(this.extractFile);
}
// Parse the HTTP response body as JSON; fall back to an empty object so
// subscribers always receive an object.
private extractFile(response: Response) {
return response.json() || {};
}
Update your code to call subscribe instead of map
I am new to Node and writing a small application. I haven't used a language this asynchronous on the server before and have gotten myself into a bit of a pickle. I need to take a string, query a table for an id, then insert into a second table using the result, then return a string from the function two levels up. I have a custom DAO I use for the db stuff. Here is the function where it all happens:
// Generates an auth token for (userId, client) and persists it.
// NOTE(review): `await(query)` below is call syntax, not the ES2017
// keyword -- inside a plain (non-async) function the keyword would be a
// syntax error, so this presumably relies on the `asyncawait` package;
// confirm.
function generateToken(data, userId, client) {
// Random HMAC key. Math.random() is not cryptographically secure --
// consider crypto.randomBytes for token material.
var random = Math.floor(Math.random() * 100001);
var sha256 = crypto.createHmac("sha256", random );
var token = sha256.update(data).digest("base64");
// Look up the app row matching the client descriptor.
var query = dao.select(
'auth.apps',
{
name: client.name,
version: client.version,
subversion: client.subversion,
patch: client.patch
}
).done(
function(result) {
// Store the token against the user and the resolved app id.
// NOTE(review): the insert's promise is not awaited, so the
// function can return before the token row is written.
dao.insert(
'auth.tokens',
{
user_id:userId,
app_id: result.rows[0].id,
token:token
}
);
// This value goes to .done()'s callback, not to generateToken's caller.
return "mmmm yellllo";
}
);
// Waits on the select chain, not the insert (see note above).
var ret_val = await(query);
console.log("Token return: " + ret_val);
return ret_val;
}
and here is the relevant part of my dao for select:
// Builds and runs: SELECT * FROM <table> WHERE <where_clause> [ORDER BY ...].
// Returns the pool.query promise for the caller to await/chain.
// NOTE(review): table, the where values (via construct_where) and
// order_by are concatenated into the SQL text -- safe only if callers
// never pass untrusted input (SQL injection risk); prefer parameterized
// queries.
dbo.prototype.select = function(table, where, order_by) {
var where_clause = this.construct_where(where);
var sql = 'SELECT * FROM ' + table + ' WHERE ' + where_clause;
if(order_by !== undefined) {
sql = sql + ' ORDER BY ' + order_by;
};
var result = this.pool.query(sql);
return result;
};
and insert:
// Builds and runs: INSERT INTO <table>(<cols>) VALUES($1,...) RETURNING id.
// Returns the pool.query promise; query errors are logged and swallowed
// (matching the original behavior), so a failed insert resolves undefined.
// SECURITY FIX: values are now bound as query parameters instead of being
// quoted into the SQL text, closing the injection hole for values.
// NOTE(review): the table name and column names are still concatenated --
// they must come from trusted code, never from user input.
dbo.prototype.insert= function(table, values) {
var keys = Object.keys(values);
var key_list = keys.join(', ');
// One $n placeholder per column, in the same order as the params array.
var placeholders = keys.map(function(_, i) { return '$' + (i + 1); }).join(', ');
var params = keys.map(function(k) { return values[k]; });
var sql = 'INSERT INTO ' + table + '(' + key_list + ') VALUES(' + placeholders + ') RETURNING id';
var result = this.pool.query(sql, params).catch(function(error) {
console.log("SQL:" + sql + " error:" + error);
});
return result;
};
How do I unwind the double promise? I want the generateToken function to return the token variable, but only after the insert query has finished.
There is a library named deasync.
And the motivation to create it was to solve the situations when
API cannot be changed to return merely a promise or demand a callback
parameter
So this is the primary and probably the only use case. Because in general Node.js should stay async.
To do the trick you basically should write a function that accepts a callback and then wrap it with deasync as follows:
// deasync lets a callback-style API be consumed as a blocking call.
var deasync = require('deasync');
// Callback-style token generator: the extra params come first and the
// node-style callback (err, result) comes last, as deasync expects.
var asyncGenerateToken = function (data, userId, client, callback) {
    var token = 'abc';
    // Simulated async work; when it finishes, hand the token to the
    // callback -- error is the 1st argument, data the 2nd.
    setTimeout(function () {
        callback(null, token);
    }, 1000);
};
// Wrapping with deasync yields a function that blocks until the
// callback fires and then returns its data argument.
var generateToken = deasync(asyncGenerateToken);
// Blocks for about a second, then yields the token synchronously.
var token = generateToken('my data', 'my user id', 'my client');
console.log(token);
Hope this helps.