Node.js and HBase

I am trying to connect to HBase via the Node.js hbase module.
I have used
https://github.com/wdavidw/node-hbase
to download the hbase module for Node.js, and I am trying to run the code below in my Node.js script, which uses Express and handles the incoming request:
.......
app.post('/upload', function(req, res, next){
  var read_stream = fs.createReadStream(req.files.upload_file.path, {encoding: 'base64'});
  read_stream.on("data", function(data){
    // process.stdout.write(data);
    dataload(data);
  });
  read_stream.on("error", function(err){
    console.error("An error occurred: %s", err);
  });
  read_stream.on("close", function(){
    console.log("File closed.");
  });
});
function dataload(data){
  var hbase = require('hbase');
  var tableobj = hbase({ host: "{my server ip}", port: "8080" }).getTable('my_table1');
  sys.debug(tableobj);
  tableobj.create("my_column_family", function(err, success){
    if (err) {
      sys.debug("error1");
    } else {
      this.getRow('my_row')
        .put('my_column_family:my_column', data, function(err, success){
          if (err) {
            sys.debug("error");
          } else {
            sys.debug("success");
          }
        });
    }
  });
}
Problem faced: when I restart the server (i.e. the server created by this script, by running "node {script js file name}" again at the Cygwin command prompt), the HBase table is re-created and all previously inserted rows are removed.
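The table is wiped because tableobj.create() runs on every upload. A minimal sketch of one way around this, guarding the create with an existence check and building the client once at module load; it assumes the node-hbase table object exposes an exists() method as described in the project's README, so verify that against the version you installed:
// Sketch only: guard table creation so restarts keep existing rows.
// Assumes node-hbase's table object provides exists(); check your version.
var hbase = require('hbase');
var table = hbase({ host: "{my server ip}", port: "8080" }).getTable('my_table1');
function dataload(data){
  table.exists(function(err, exists){
    if (err) return console.error(err);
    if (exists) return insertRow(data); // table already there: skip create
    table.create("my_column_family", function(err){
      if (err) return console.error(err);
      insertRow(data);
    });
  });
}
function insertRow(data){
  table.getRow('my_row')
    .put('my_column_family:my_column', data, function(err){
      console.log(err ? "insert failed" : "insert ok");
    });
}
Creating the client once at module load, instead of inside dataload(), also avoids re-establishing the REST connection on every upload.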

Related

How to convert JSON to CSV and then save to computer as CSV file

I'm currently trying to pull JSON data from an API, convert it to CSV using the json2csv Node.js module, and then save the data as a CSV file on my laptop. However, when I run the script, nothing happens.
The json data is then formatted similar to the below data variable:
const apiDataPull = postDataRequest()
  .then(data => {
    data = [
      {
        'day': '*date*',
        'revenue': '*revenue value*'
      }
    ];
    return data;
  });
And this is to convert the data to csv and download it, which is where the problem seems to be arising:
apiDataPull.then(data => {
  json2csv({
    data: data,
    fields: ['day', 'revenue', 'totalImpressions', 'eCPM']
  },
  function(err, csv) {
    if (err) console.log(err);
    fs.writeFile('pubmaticData.csv', csv, function(err){
      if (err) throw err;
      console.log('File Saved!');
    });
  });
});
There is data being pulled from the API, but it's not being saved. I'm not even sure if it's been converted to csv properly or not.
You can use this npm package - csv-stringify.
https://github.com/adaltas/node-csv-stringify
// Load required modules
const http = require("http");
const stringify = require('csv-stringify');
const fs = require('fs');

const hostname = '127.0.0.1';
const port = 3000;

// Create HTTP server and listen on port 3000 for requests
const server = http.createServer((req, res) => {
  // jsonObject is assumed to hold your parsed JSON records
  stringify(jsonObject, {header: true}, function(err, output) {
    fs.writeFile('formatted_json.csv', output, 'utf8', function(err) {
      if (err) {
        console.log('Some error occurred - file either not saved or corrupted file saved.');
      } else {
        console.log('It\'s saved!');
      }
      res.end(); // finish the HTTP response so the browser doesn't hang
    });
  });
});

// Listen for requests on port 3000 and log the address once listening
server.listen(port, hostname, () => {
  console.log(`Server running at http://${hostname}:${port}/`);
});
This should help you. Create this file and name it index.js.
Make sure you have Node and npm installed, then run the following in the same directory:
npm install csv-stringify
node index.js
Open localhost:3000 in your browser and you will see formatted_json.csv created in the same directory where index.js is located. Hope this helps!
You are probably never starting the promise chain, and it looks like you are not using json2csv correctly.
Take a look at this example:
let json2csv = require("json2csv");
let fs = require("fs");

apiDataPull = Promise.resolve([
  {
    'day': '*date*',
    'revenue': '*revenue value*'
  }
]).then(data => {
  return json2csv.parseAsync(data, {fields: ['day', 'revenue', 'totalImpressions', 'eCPM']});
}).then(csv => {
  fs.writeFile('pubmaticData.csv', csv, function (err) {
    if (err) throw err;
    console.log('File Saved!');
  });
});
The saved file is:
"day","revenue","totalImpressions","eCPM"
"*date*","*revenue value*",,

Azure web app tedious

I want to use tedious in my Azure web app, following this tutorial: https://learn.microsoft.com/en-us/azure/sql-database/sql-database-connect-query-nodejs. I get the error "Uncaught Error: Module name "tedious" has not been loaded yet" from require('tedious').Connection. How do I load this module in Azure?
The JavaScript code:
var Connection = require('tedious').Connection;
var Request = require('tedious').Request;

// Create connection to database
var config = {
  userName: '******', // update me
  password: '*****', // update me
  server: '*******', // update me
  options: {
    database: 'signals' // update me
  }
};

var connection = new Connection(config);

// Attempt to connect and execute queries if connection goes through
connection.on('connect', function(err) {
  if (err) {
    console.log(err);
  } else {
    queryDatabase();
  }
});

function queryDatabase(){
  console.log('Reading rows from the Table...');
  // Read all rows from table
  var request = new Request(
    "SELECT * FROM signals",
    function(err, rowCount, rows) {
      console.log(rowCount + ' row(s) returned');
    }
  );
  request.on('row', function(columns) {
    columns.forEach(function(column) {
      console.log("%s\t%s", column.metadata.colName, column.value);
    });
  });
  connection.execSql(request);
}
How do I load this module in Azure?
In Azure, you can install a Node.js module through the Kudu Debug Console, which can be accessed via https://<your-web-app-name>.scm.azurewebsites.net/DebugConsole
cd to D:\home\site\wwwroot in the console.
Run the following command inside the wwwroot directory: npm install tedious
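If you deploy with Git or a zip package instead, another option is to declare the dependency in package.json so Azure's deployment step installs it for you. A minimal sketch (the name and version below are placeholders, not from the original post):
{
  "name": "signals-app",
  "version": "1.0.0",
  "dependencies": {
    "tedious": "*"
  }
}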

Improving performance of inserting into Mongo from ActiveMQ

The basic idea of the following code is that I read messages off an ActiveMQ Artemis installation and insert them into a MongoDB instance.
It works well for up to a hundred or so messages per second, but it crashes if I throw a few thousand at it. My first guess would be the constant opening and closing of database connections. Should I also think about using an in-memory store and doing bulk database inserts?
The code is all running in Node using the mqtt and mongodb npm packages. The code below, the database, and the queue are all running in Docker containers, if it makes any difference.
var mqtt = require('mqtt'),
    client = mqtt.connect('mqtt://mq:1883', {
      username: "*************",
      password: "*************"
    }),
    MongoClient = require('mongodb').MongoClient,
    ObjectId = require('mongodb').ObjectID,
    assert = require('assert'),
    url = 'mongodb://db:27017/uo-readings';

client.on('connect', function () {
  client.subscribe('readings');
});

client.on('error', function(error){
  console.log(error);
});

client.on('message', function (topic, message) {
  console.log(message.toString());
  // A new MongoDB connection is opened for every message received
  MongoClient.connect(url, function(err, db) {
    assert.equal(null, err);
    console.log("Connected correctly to server.");
    db.collection('readings').insertOne(JSON.parse(message.toString()), function(err, result) {
      assert.equal(err, null);
      console.log("Inserted a document into the readings collection.");
    });
    client.end(function(){
      console.log("Closing Connection.");
      db.close();
    });
  });
});
See @Jonathan Muller's comment above.
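To illustrate the question's own first guess, here is a minimal sketch that opens the MongoDB connection once at startup and reuses it for every message, using the same mqtt and callback-style mongodb packages as the original code; treat it as an outline, not a drop-in replacement:
var mqtt = require('mqtt');
var MongoClient = require('mongodb').MongoClient;
var url = 'mongodb://db:27017/uo-readings';

// Connect to MongoDB once at startup and reuse the handle for every message.
MongoClient.connect(url, function (err, db) {
  if (err) throw err;
  var readings = db.collection('readings');

  var client = mqtt.connect('mqtt://mq:1883', {
    username: "*************",
    password: "*************"
  });

  client.on('connect', function () {
    client.subscribe('readings');
  });

  client.on('message', function (topic, message) {
    // One insert per message, with no per-message connection churn.
    // Buffering messages and flushing them with insertMany() would cut round trips further.
    readings.insertOne(JSON.parse(message.toString()), function (err) {
      if (err) console.error(err);
    });
  });
});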

Downloading file in node using GridFS in production

I have an Express app which works when I run it locally. The issue is when downloading a file which was saved in MongoDB using GridFS. When running it locally (I just do ./bin/www and go to localhost:3000), I can download the file. But when I run it remotely, I download an HTML file.
This is the route which handles the response:
router.get('/getfile', function(req, res) {
  if (req.isAuthenticated()) {
    var gfs = Grid(mongoose.connection, mongoose.mongo);
    var id = req.query.id;
    gfs.exist({_id: id}, function (err, found) {
      if (err) return handleError(err);
      if (!found)
        res.send('Error on the database looking for the file.');
    });
    var readStream = gfs.createReadStream({
      _id: id
    }).pipe(res);
  } else {
    res.redirect('/login');
  }
});
and that is called by this line in a jade file:
td #[a(href="getfile?id=#{log.videoId}" download="video") #[span(name='video').glyphicon.glyphicon-download]]
On the server, I'm doing:
/logApp$ export NODE_ENV=production
/logApp$ ./bin/www
The MongoDB daemon is running. In fact, I can query the database. And I'm not writing any file! I want to read it.
EDIT: I found the error message:
MongoError: file with id #### not opened for writing
You need to move the code that pipes the file to the response into the gfs.exist callback so that it runs after the exist check.
gfs.exist({ _id: id }, function(err, found) {
  if (err) {
    handleError(err);
    return;
  }
  if (!found) {
    res.send('Error on the database looking for the file.');
    return;
  }
  // We only get here if the file actually exists, so pipe it to the response
  gfs.createReadStream({ _id: id }).pipe(res);
});
Apparently you get that generic "not opened for writing" error if the file doesn't exist.

Node js oracle module: the console log statement not working when select query is executed

I have installed the node-oracle module and connected to an Oracle database successfully, but I have a problem when I execute a SELECT query. My code is below (the file name is tests-connection.js):
var oracle = require('./');

var connectData = {
  hostname: "myhost",
  port: 1521,
  database: "mydb", // System ID (SID)
  user: "usertest",
  password: "mypass"
};

oracle.connect(connectData, function(err, connection) {
  if (err) { console.log("Error connecting to db:", err); return; }
  connection.execute("SELECT systimestamp FROM DUAL", [], function(err, results) {
    if (err) { console.log("Error executing query:", err); return; }
    console.log(results); // This statement is not working
    connection.close(); // call only when the query has finished executing
  });
});
The output in Node.js command prompt (nothing):
C:\xampp\htdocs\motcua.dev\public\socket.io\node_modules\oracle>node tests-connection
C:\xampp\htdocs\motcua.dev\public\socket.io\node_modules\oracle>
I have tried updating a record and that works, but when I execute a SELECT query I do not see any output from the console.log statement; the terminal stays empty until the running code finishes.
How can I fix this problem?
Thanks
