SyntaxError: Unexpected identifier in json function - node.js

I am building a weather data pipeline that starts with an Internet of Things (IoT) device, uses a message queue to receive and deliver data, and leverages a serverless function to move the data into a data warehouse, where a dashboard then displays the information. I am getting an error in the function.
/**
 * Background Cloud Function to be triggered by PubSub.
 *
 * @param {object} event The Cloud Functions event.
 * @param {function} callback The callback function.
 */
exports.subscribe = function (event, callback) {
  const BigQuery = require('@google-cloud/bigquery');
  const projectId = "iot2analytics-ca4"; // Enter your project ID here
  const datasetId = "weatherData"; // Enter your BigQuery dataset name here
  const tableId = "weatherDatatable"; // Enter your BigQuery table name here -- make sure it is set up correctly
  const PubSubMessage = event.data;
  // Incoming data is in JSON format
  const incomingData = PubSubMessage.data
    ? Buffer.from(PubSubMessage.data, 'base64').toString()
    : "{'sensorID':'na','timecollected':'1/1/1970 00:00:00','zipcode':'00000','latitude':'0.0','longitude':'0.0','temperature':'-273','humidity':'-1','dewpoint':'-273','pressure':'0'}";
  const jsonData = JSON.parse(incomingData);
  var rows = [jsonData];
  console.log(`Uploading data: ${JSON.stringify(rows)}`);
  // Instantiates a client
  const bigquery = BigQuery({
    projectId: projectId
  });
  // Inserts data into a table
  bigquery
    .dataset(datasetId)
    .table(tableId)
    .insert(rows)
    .then((foundErrors) => {
      rows.forEach((row) => console.log('Inserted:', row));
      if (foundErrors && foundErrors.insertErrors != undefined) {
        foundErrors.forEach((err) => {
          console.log('Error:', err);
        });
      }
    })
    .catch((err) => {
      console.error('ERROR:', err);
    });
  // [END bigquery_insert_stream]
  callback();
};

You are using single quotes to wrap the strings in your fallback JSON, which the JSON standard does not allow, so JSON.parse throws on it. Replace the single quotes with double quotes, or build the object in JavaScript and stringify it. Here, use this:
let temp = {
  "sensorID": "na",
  "timecollected": "1/1/1970 00:00:00",
  "zipcode": "00000",
  "latitude": "0.0",
  "longitude": "0.0",
  "temperature": "-273",
  "humidity": "-1",
  "dewpoint": "-273",
  "pressure": "0"
};
temp = JSON.stringify(temp);

const incomingData = PubSubMessage.data
  ? Buffer.from(PubSubMessage.data, 'base64').toString()
  : temp;
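Equivalently, the fallback can stay a single string literal as long as the JSON inside it uses double quotes; the surrounding JavaScript string can then use single quotes:

const incomingData = PubSubMessage.data
  ? Buffer.from(PubSubMessage.data, 'base64').toString()
  : '{"sensorID":"na","timecollected":"1/1/1970 00:00:00","zipcode":"00000","latitude":"0.0","longitude":"0.0","temperature":"-273","humidity":"-1","dewpoint":"-273","pressure":"0"}';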

Related

How to execute SQL function in Azure function project

I am working on an Azure Function in Node.js that invokes a web service, and that part works fine. I have a function GetDetails that makes a SQL query to retrieve data from a SQL Server database.
const sql = require("mssql");
const dataSQL = {};
const GUID = "";
const navServiceKey = "";
const navUserName = "";
async function GetDetails() {
var email = "yamen#gmail.com";
var password = "password";
try {
console.log("nav service" + navServiceKey);
// make sure that any items are correctly URL encoded in the connection string
await sql.connect(
"Server=tcp:app.windows.net,1433;Database=BHUB_TEST;User Id=AppUser;Password=password;Encrypt=true MultipleActiveResultSets=False;TrustServerCertificate=False;ConnectionTimeout=30;"
);
const result =
await sql.query`select * from users where email = ${email} AND password = ${password} `;
if (result.rowsAffected[0] >= 1) {
dataSQL = result.recordset[0];
navServiceKey = JSON.stringify(dataSQL.navServiceKey);
GUID = JSON.stringify(dataSQL.userGUID);
navUserName = JSON.stringify(dataSQL.navUserName);
} else {
console.log("failed");
}
} catch (err) {
}}
Since this is Node.js, if I were to test this SQL function alone I'd just run node index.js, and the function executes successfully and returns the result. However, I am calling this function within the Azure Function below, and when I run the Azure Functions project and copy the URL it gives me to test it in Postman, the SQL function doesn't return anything!
Any idea how to execute a SQL query function inside an Azure Function, if that makes sense?
module.exports = async function (context, req) {
  GetDetails();
  const axios = require("axios");
  const data = {
    email: req.query.email,
    password: req.query.password,
  };
  var cred = "YAMEN" + ":" + "jbdv******";
  const encoded = Buffer.from(cred, "utf8").toString("base64");
  var credbase64 = "Basic " + encoded;
  const headers = {
    Authorization: credbase64,
    "Content-Type": "application/json",
  };
  try {
    const url = `https://tegos/BC19-NUP/QryEnwisAppUser?filter=UserSecurityID eq ${GUID}`;
    const response = await axios.get(url, { headers });
    console.log(response);
    console.log(response.data);
    context.res = {
      // status: 200, /* Defaults to 200 */
      body: response.data,
    };
  } catch (e) {
    // maybe return the error
    console.error(e);
  }
};
pyodbc will not help here: it is a Python module, and this is a Node.js function, so the mssql package you are already using is the right way to connect. The real problem is that GetDetails() is async and the handler never awaits it, so the Axios call runs before GUID has been populated. On top of that, dataSQL, GUID, navServiceKey, and navUserName are declared with const, so assigning to them inside GetDetails throws a TypeError, which the empty catch block silently swallows. Declare them with let and await the call in the handler.
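A minimal sketch of those two changes, keeping the question's GetDetails and Axios call as they are (the starred password and the tegos URL are left as in the question):

let dataSQL = {};
let GUID = "";
let navServiceKey = "";
let navUserName = "";

module.exports = async function (context, req) {
  // Wait for the SQL lookup to finish before GUID is used in the URL below.
  await GetDetails();

  const axios = require("axios");
  const cred = "YAMEN" + ":" + "jbdv******";
  const encoded = Buffer.from(cred, "utf8").toString("base64");
  const headers = {
    Authorization: "Basic " + encoded,
    "Content-Type": "application/json",
  };

  try {
    const url = `https://tegos/BC19-NUP/QryEnwisAppUser?filter=UserSecurityID eq ${GUID}`;
    const response = await axios.get(url, { headers });
    context.res = { body: response.data };
  } catch (e) {
    // Surface the failure instead of swallowing it.
    context.log.error(e);
    context.res = { status: 500, body: e.message };
  }
};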

Getting Error [Cannot read properties of undefined (reading 'generatetypeinfo')] in Node JS API post method

I am new to RESTful API development using NodeJS and SQL Server. I am trying to do a simple [post] operation where I pass an array of objects to the API endpoint, which then calls a SQL Server procedure with a table-valued parameter. I am getting the error below:
Cannot read properties of undefined (reading 'generateTypeInfo')
I was really shocked to see that there is not a single help topic on Google regarding this error. I do not want to learn ASP.NET Core just for this, because JavaScript has an easy learning curve. Am I making a mistake by developing a REST API with the combination of NodeJS and SQL Server? Below is my .js file called in the POST endpoint:
const sql = require("mssql/msnodesqlv8");
const dataAccess = require("../DataAccess");
const fn_CreateProd = async function (product) {
let errmsg = "";
let connPool = null;
await sql
.connect(global.config)
.then((pool) => {
global.connPool = pool;
result = pool.request().query("select * from products where 1=2");
return result;
})
.then((retResult) => {
const srcTable = retResult.recordset.toTable("tvp_products");
let newsrcTable = Array.from(srcTable.columns);
console.log('Source table b4 mapping',srcTable)
newsrcTable = newsrcTable.map((i) => {
i.name = i.name.toUpperCase();
return i;
});
console.log('Source table after convert array with mapping',newsrcTable)
const prdTable = dataAccess.generateTable(
newsrcTable,
product,
"tvp_products"
);
console.log("Prepared TVp data", prdTable);
const newResult = dataAccess.execute(`sp3s_ins_products_tvp`, [
{ name: "tblprods", value: prdTable },
]);
console.log("Result of Execute Final procedure", newResult);
return newResult;
})
.then(result => {
console.log("Result of proc", result);
if (!result.errmsg) errmsg = "Products Inserted successfully";
else errmsg = result.errmsg;
})
.catch((err) => {
console.log("Enter catch of Posting prod", err.message);
errmsg = err.message;
})
.finally((resp) => {
sql.close();
});
return { retStatus: errmsg };
};
module.exports = fn_CreateProd;
and the content of the generateTable function is below:
const generateTable = (columns, entities, tvpName) => {
  const table = new mssql.Table(tvpName);
  // const testobj = { type: [sql.numeric], name: 'Sanjay' };
  // console.log('Columns testobj', testobj.type)
  columns.forEach(column => {
    // console.log('Column data for column:', column)
    if (column && typeof column === 'object' && column.name && column.type) {
      let colOptions = {};
      if (column.type == mssql.Numeric) {
        colOptions.scale = column.scale;
        colOptions.precision = column.precision;
      } else if (column.type == mssql.VarChar || column.type == mssql.Char) {
        colOptions.length = column.length;
      }
      // console.log(`Column name type for column: ${column.name} - ${colType} - Actual: ${column['type']}`)
      if (column.hasOwnProperty('options')) {
        table.columns.add(column.name.toUpperCase(), colType, column.options);
      } else {
        table.columns.add(column.name.toUpperCase(), colOptions);
      }
    }
  });
  console.log('Generated table', table);
  const newEntities = entities.map(obj => keystoUppercase(obj));
  // console.log('New entities after uppercase', newEntities)
  newEntities.forEach(entity => {
    table.rows.add(...columns.map(i => entity[i.name]));
  });
  return table;
};
I have found the solution now. If you look at the code of the generateTable function, I was adding the columns to the table without specifying the data type of each column, which is what caused this error. I have added one more property, type, alongside the colOptions object passed to the columns.add call in generateTable. Thanks a lot anyway for the quick replies by Dale K.
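For reference, a minimal sketch of what that fix looks like, assuming column.type holds one of the mssql data types (columns.add takes the type as its second argument):

// Pass the column's data type explicitly; scale/precision/length stay in colOptions.
if (column.hasOwnProperty('options')) {
  table.columns.add(column.name.toUpperCase(), column.type, column.options);
} else {
  table.columns.add(column.name.toUpperCase(), column.type, colOptions);
}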

How to save data in AWS Neptune DB using Node.js?

Is there a way to save data in an Amazon AWS Neptune DB using Node.js?
I am running this code in a Lambda.
I made the connection to the Neptune DB using the code below.
const gremlin = require('gremlin');
const DriverRemoteConnection = gremlin.driver.DriverRemoteConnection;
const Graph = gremlin.structure.Graph;
dc = new DriverRemoteConnection('endpoint', {});
const graph = new Graph();
const g = graph.traversal().withRemote(dc);
Here's a JavaScript Lambda function that writes data to Neptune (and wraps the write in a retry block in case of concurrent modifications). The function gets the Neptune endpoint and port from environment variables. The write query is in the query() method. It's a simple upsert example that tries to create a vertex using a randomly generated ID. If a vertex with that ID already exists, the query returns that vertex rather than creating a new one.
This example creates a single connection that persists for the lifetime of the Lambda container (rather than per invocation). There's some error checking in the retry code that recreates the connection in the case of an untoward network issue.
const gremlin = require('gremlin');
const async = require('async');

const traversal = gremlin.process.AnonymousTraversalSource.traversal;
const DriverRemoteConnection = gremlin.driver.DriverRemoteConnection;

let conn = createRemoteConnection();
let g = createGraphTraversalSource(conn);

const t = gremlin.process.t;
const __ = gremlin.process.statics;

async function query(id) {
  return g.V(id)
    .fold()
    .coalesce(
      __.unfold(),
      __.addV('User').property(t.id, id)
    )
    .id().next();
}

exports.handler = async (event, context) => {
  const id = Math.floor(Math.random() * 10000).toString();
  return async.retry(
    {
      times: 5,
      interval: 1000,
      errorFilter: function (err) {
        // Add filters here to determine whether error can be retried
        console.warn('Determining whether retriable error: ' + err.message);
        // Check for connection issues
        if (err.message.startsWith('WebSocket is not open')) {
          console.warn('Reopening connection');
          conn.close();
          conn = createRemoteConnection();
          g = createGraphTraversalSource(conn);
          return true;
        }
        // Check for ConcurrentModificationException
        if (err.message.includes('ConcurrentModificationException')) {
          console.warn('Retrying query because of ConcurrentModificationException');
          return true;
        }
        return false;
      }
    },
    async function (cb) {
      let result = await query(id);
      return result['value'];
    });
};

function createRemoteConnection() {
  return new DriverRemoteConnection(
    connectionString(),
    {
      mimeType: 'application/vnd.gremlin-v2.0+json',
      pingEnabled: false
    });
}

function createGraphTraversalSource(conn) {
  return traversal().withRemote(conn);
}

function connectionString() {
  return 'wss://' +
    process.env['neptuneEndpoint'] +
    ':' +
    process.env['neptunePort'] +
    '/gremlin';
}
A simple demo based on the TinkerPop documentation:
const handler = async (event) => {
  // add a person vertex with a property 'name' and value 'stephen'.
  await g.addV('person').property('name', 'stephen').next();
  // fetch all vertices and get their name properties.
  const result = await g.V().values('name').toList();
  console.log(result);
  return {
    statusCode: 201,
    body: JSON.stringify({ message: "Testing Gremlin!", data: result }),
  };
};

Node.js: Query SQLite with 'sqlite'

I'm trying to get the hang of Node (I mainly use Python), so I'm working on a small project to read and write data to a SQLite database.
Luckily I am having no issue writing to the database, but I cannot seem to get queries to work at all. I've tested the queries in the SQL terminal and they are successful.
So far, I have something like:
const fs = require("fs");
const util = require("util");
const sqlite = require("sqlite");
const Promise = require("bluebird")
// const DATABASE = ":memory:";
const DATABASE = "./database.sqlite";
function insertDataIntoDatabase(transactions, db) {
// Write each transaction into the database.
let sqlStatement = "INSERT INTO Trx \
(name, address, amount, category) \
VALUES "
for (var i = 0; i < transactions.length; ++i) {
let trx = transactions[i];
sqlStatement += util.format(
"('%s', '%s', %d, '%s'), ",
trx.name,
trx.address,
trx.amount,
trx.category,
);
}
sqlStatement = sqlStatement.substring(0, sqlStatement.length - 2);
db.then(db => db.run(sqlStatement))
.catch((err) => console.log(err));
}
function getTransactions (db, category) {
// Return an array of valid transactions of a given category.
let where = "";
if (category) {
where = util.format("WHERE category='%s'", category);
}
let sqlStatement = util.format("SELECT * from Trx %s", where);
sqlStatement = "SELECT * from Trx"; // Trying to figure out whats happening
console.log(sqlStatement);
db.then(db => {
db.all(sqlStatement)
.then((err, rows) => {
console.log(rows); // undefined
console.log(err); // []
})
})
}
// Set up the db connection
const db = sqlite.open(DATABASE, { cached: true })
.then(db => db.migrate({ force: 'last' }));
// Read transactions and write them to the database
fs.readFile("transactions.json", "utf8", (err, data) => {
let transactions = JSON.parse(data).transactions;
insertDataIntoDatabase(transactions, db);
})
// Get transaction data
getValidTransactions(db, 'credit');
// Close connection to DB
db.then(db => db.close());
Looking at this again, I think the issue is the async nature of Node. The query was successful, but at that point in time I had not yet inserted the data from the JSON file into the database, hence the empty result.
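For anyone hitting the same thing, a minimal sketch of one way to sequence it with async/await; buildInsertStatement is a hypothetical helper standing in for the INSERT-building loop above, and note that db.all resolves with the rows as its single fulfillment value, so a .then((err, rows)) handler leaves rows undefined:

async function main() {
  // Open the database and run migrations before any reads or writes.
  const db = await sqlite.open(DATABASE, { cached: true });
  await db.migrate({ force: 'last' });

  // Insert first, and wait for it to finish...
  const data = await util.promisify(fs.readFile)("transactions.json", "utf8");
  const transactions = JSON.parse(data).transactions;
  await db.run(buildInsertStatement(transactions)); // hypothetical helper

  // ...then query: the rows come back as the resolved value.
  const rows = await db.all("SELECT * FROM Trx WHERE category = ?", "credit");
  console.log(rows);

  await db.close();
}

main().catch(console.error);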

Cloud Functions for Firebase BigQuery sync error

We're working on a Cloud Function that lets us keep our BigQuery and Firebase databases in sync. The function triggers when a place is created/updated/deleted.
Based on the trigger action (create/update/delete) we add a property called big_query_active to signal whether the object exists. Same goes for the date.
Our current problem is that the call to BigQuery sometimes returns an error, which means the data is no longer in sync. How can this be prevented?
'use strict';

// Default imports.
const functions = require('firebase-functions');
const bigQuery = require('@google-cloud/bigquery');

// If you want to change the nodes to listen to REMEMBER TO change the constants below.
// The 'id' field is AUTOMATICALLY added to the values, so you CANNOT add it.
const ROOT_NODE = 'places';
const VALUES = [
  'country_id',
  'category_id',
  'name',
  'active',
  'archived'
];

// This function listens to the supplied root node, but on child added/removed/changed.
// When an object is inserted/deleted/updated the appropriate action will be taken.
exports.children = functions.database.ref(ROOT_NODE + '/{id}').onWrite(event => {
  const query = bigQuery();
  const dataset = query.dataset('stampwallet');
  const table = dataset.table(ROOT_NODE);
  if (!event.data.exists() && !event.data.previous.exists()) {
    return;
  }
  const item = event.data.exists() ? event.data.val() : event.data.previous.val();
  const data = {};
  data['id'] = event.params.id;
  for (let index = 0; index < VALUES.length; index++) {
    const key = VALUES[index];
    data[key] = item[key] !== undefined ? item[key] : null;
  }
  data['big_query_date'] = new Date().getTime() / 1000;
  data['big_query_active'] = event.data.exists();
  return table.insert(data).then(() => {
    return true;
  }).catch((error) => {
    if (error.name === 'PartialFailureError') {
      console.log('A PartialFailureError happened while uploading to BigQuery...');
    } else {
      console.log(JSON.stringify(error));
      console.log('Random error happened while uploading to BigQuery...');
    }
  });
});
This is the error that we (sometimes) receive:
{"code":"ECONNRESET","errno":"ECONNRESET","syscall":"read"}
How can we prevent the data from going out of sync? Or is there a way to retry so that the insert always succeeds?
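A minimal sketch of one possible approach, assuming ECONNRESET-style failures are transient and the row is safe to re-insert (BigQuery streaming inserts can deduplicate via an insertId if needed): wrap the insert in a small retry helper with exponential backoff.

// Retry the streaming insert a few times when the error looks transient.
function insertWithRetry(table, data, attempt = 0) {
  const MAX_ATTEMPTS = 5;
  return table.insert(data).catch((error) => {
    const transient = error.code === 'ECONNRESET' || error.code === 'ETIMEDOUT';
    if (transient && attempt < MAX_ATTEMPTS - 1) {
      const delayMs = Math.pow(2, attempt) * 1000; // 1s, 2s, 4s, 8s
      return new Promise((resolve) => setTimeout(resolve, delayMs))
        .then(() => insertWithRetry(table, data, attempt + 1));
    }
    throw error; // not transient, or out of attempts
  });
}

// Usage inside the trigger, replacing the direct table.insert(data) call:
// return insertWithRetry(table, data).then(() => true).catch((error) => { ... });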
