node.js: retrieve an HTTP CSV file and load it into Mongoose

I'm very new to coding in general, so I apologize ahead of time if this question should be rather obvious. Here's what I'm looking to do, followed by the code I've used so far.
I'm trying to fetch gzip'd CSV rank data from a website and store it in a database, for a clan website that I'm working on developing. Once I get this figured out, I'll need to grab the data once every 5 minutes. I've been able to retrieve the CSV data, but right now it goes into a text file and I need to store it in MongoDB instead.
Here's my code:
var DB = require('../modules/db-settings.js');
var http = require('http');
var zlib = require('zlib');
var fs = require('fs');
var mongoose = require('mongoose');

var db = mongoose.createConnection(DB.host, DB.database, DB.port, { user: DB.user, pass: DB.password });

var request = http.get({
  host: 'www.earthempires.com',
  path: '/ranks_feed?apicode=myapicode',
  port: 80,
  headers: { 'accept-encoding': 'gzip' }
});

request.on('response', function(response) {
  var output = fs.createWriteStream('./output');

  switch (response.headers['content-encoding']) {
    // or, just use zlib.createUnzip() to handle both cases
    case 'gzip':
      response.pipe(zlib.createGunzip()).pipe(output);
      break;
    default:
      response.pipe(output);
      break;
  }
});
db.on('error', console.error.bind(console, 'connection error:'));

db.once('open', function callback() {
  var rankSchema = new mongoose.Schema({
    serverid: Number,
    resetid: Number,
    rank: Number,
    countryNumber: Number,
    name: String,
    land: Number,
    networth: Number,
    tag: String,
    gov: String,
    gdi: Boolean,
    protection: Boolean,
    vacation: Boolean,
    alive: Boolean,
    deleted: Boolean
  });
});
Here's an example of what the CSV will look like (first 5 lines of the file):
9,386,1,451,Super Kancheong Style,22586,318793803,LaF,D,1,0,0,1,0
9,386,2,119,Storm of Swords,25365,293053897,LaF,D,1,0,0,1,0
9,386,3,33,eug gave it to mak gangnam style,43501,212637806,LaF,H,1,0,0,1,0
9,386,4,128,Justpickupgirlsdotcom,22628,201606479,LaF,H,1,0,0,1,0
9,386,5,300,One and Done,22100,196130870,LaF,H,1,0,0,1,0

Hope it's not too late to help, but here's what I'd do:
Request the CSV formatted data and store it in memory or a file.
Parse the CSV data to convert each row into an object.
For each object, use Model.create() to create your new entry.
First, you need to create a model from your Schema:
var Rank = db.model('Rank', rankSchema);
Then you can parse your block of CSV text (whether you read it from a file or parse it directly from the response is up to you). I created my own bogus data variable since I don't have access to the API, but as long as your data is a newline-delimited block of CSV text this should work:
/* Data is just a block of CSV formatted text. This can be read from a file
   or retrieved right in the response. */
var data = '' +
  '9,386,1,451,Super Kancheong Style,22586,318793803,LaF,D,1,0,0,1,0\n' +
  '9,386,2,119,Storm of Swords,25365,293053897,LaF,D,1,0,0,1,0\n' +
  '9,386,3,33,eug gave it to mak gangnam style,43501,212637806,LaF,H,1,0,0,1,0\n' +
  '9,386,4,128,Justpickupgirlsdotcom,22628,201606479,LaF,H,1,0,0,1,0\n' +
  '9,386,5,300,One and Done,22100,196130870,LaF,H,1,0,0,1,0\n';

data = data.split('\n');

data.forEach(function(line) {
  line = line.split(',');

  /* Skip blank or malformed lines (a valid row has 14 fields). */
  if (line.length != 14)
    return;

  /* Create an object representation of our CSV data. */
  var new_rank = {
    serverid: line[0],
    resetid: line[1],
    rank: line[2],
    countryNumber: line[3],
    name: line[4],
    land: line[5],
    networth: line[6],
    tag: line[7],
    gov: line[8],
    gdi: line[9],
    protection: line[10],
    vacation: line[11],
    alive: line[12],
    deleted: line[13]
  };

  /* Store the new entry in MongoDB. */
  Rank.create(new_rank, function(err, rank) {
    if (err) return console.error(err);
    console.log('Created new rank!', rank);
  });
});
You could put this in a script and run it every 5 minutes using a cron job. On my Mac, I'd edit my cron file with crontab -e, and set up a job with a line like this:
*/5 * * * * /path/to/node /path/to/script.js > /dev/null
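If you'd rather skip the intermediate file and parse straight from the response, a minimal sketch (assuming the same request and zlib setup as above) could buffer the decompressed body in memory before splitting it:
var chunks = [];

request.on('response', function(response) {
  // gunzip only when the server actually sent gzip
  var stream = response.headers['content-encoding'] === 'gzip'
    ? response.pipe(zlib.createGunzip())
    : response;

  stream.on('data', function(chunk) { chunks.push(chunk); });
  stream.on('end', function() {
    var data = Buffer.concat(chunks).toString('utf8');
    // data is now the newline-delimited CSV block;
    // feed it to the parsing loop shown above.
  });
});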

Related

REST API with node-rtsp-stream NPM

I am trying to stream RTSP in HTML5 pages using the node-rtsp-stream NPM package. I can see the live stream in the HTML page. But when I try to wrap this in a REST API it throws TypeError: stream is not a constructor. When I call my POST method the first time it works properly; when I try to do the same again it throws the error.
Here is my API:
RTSPRouter.post('/getPreview', (req, res) => {
  // stream.mpeg1Muxer.kill();
  stream = new stream({
    name: 'name',
    streamUrl: req.body.RTSPURL,
    wsPort: 9999,
    ffmpegOptions: {
      '-r': 30
    }
  })
  res.send(stream)
})
API for killing the stream:
RTSPRouter.get('/killPreview', (req, res) => {
  process.kill(req.body.pid1)
  stream.prototype.stop() // this method also not working
})
Even when I kill the stream alone using the PID, it throws the same error.
Kindly help me to fix this problem, thanks in advance!
You're getting a TypeError because you haven't imported Stream.
Your code should look like this:
Stream = require("node-rtsp-stream");
stream = new Stream({
  name: "name",
  streamUrl: "rtsp://184.72.239.149/vod/mp4:BigBuckBunny_115k.mov",
  wsPort: 9999,
  ffmpegOptions: {
    // options: ffmpeg flags
    "-stats": "", // an option with no necessary value uses a blank string
    "-r": 30, // options with required values specify the value after the key
  },
});
res.send(stream);
Also don't forget to run npm install node-rtsp-stream.
After a few days I found the answer for this question.
You should not create the stream like this:
var stream = require('node-rtsp-stream')
stream = new stream({
  name: 'name',
  streamUrl: req.body.RTSPURL,
  wsPort: 9999,
  ffmpegOptions: {
    '-r': 30
  }
})
Instead, try the code below:
var stream = require('node-rtsp-stream')
var streamObj;

streamObj = new stream({
  name: 'name',
  streamUrl: req.body.RTSP,
  wsPort: 9999,
  ffmpegOptions: { // options: ffmpeg flags
    '-stats': '', // an option with no necessary value uses a blank string
    '-r': 30 // options with required values specify the value after the key
  }
})
The actual error was that I had imported the module under the name stream, and then tried to instantiate the stream with the same variable name stream. You have to use a different variable name to initialize the stream; in my case I used streamObj.
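For the kill endpoint, stop() is called on the stream instance, not on the constructor's prototype. A minimal sketch, assuming the instance created in the POST route is kept in a module-level streamObj variable (whether stop() fully releases the websocket port may depend on the node-rtsp-stream version):
RTSPRouter.get('/killPreview', (req, res) => {
  if (streamObj) {
    streamObj.stop() // stops the ffmpeg process and the websocket server
    streamObj = null
  }
  res.send('stream stopped')
})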

Unable to read empty cells when reading an Excel file using js-xlsx

I have an Excel file I am reading using js-xlsx. The code is working fine except where cells are empty: these cells are ignored. How do I include these cells when creating my JSON object?
I went through some questions on SO as well as some other forums for the same problem, but found nothing satisfactory.
Any help would be welcome. My code is:
reader.addEventListener('load', function() {
  var data = this.result;
  var wb = XLSX.read(data, { type: 'binary', sheetStubs: true });

  wb.SheetNames.forEach(function(sheetName) {
    // pulling out column headers for table creation
    var headers = get_header_row(wb.Sheets[sheetName]);
    createTableInDB(headers);

    // Here is your object
    var XL_row_object = XLSX.utils.sheet_to_json(wb.Sheets[sheetName]);
    //console.log(XL_row_object);
    for (var i = 0; i < XL_row_object.length; i++) {
      var json_object = XL_row_object[i];
      if (json_object !== null) {
        var dataobject = {
          "tablename": tname,
          "dbname": dbname,
          "info": json_object,
          "uname": uname
        };
        dataobject = $.toJSON(dataobject);
        $.ajax({
          type: "POST",
          url: "insertIntoTable.php",
          async: false,
          data: "pInsertData=" + dataobject,
          success: function(msg) {
            console.log(msg);
          }
        });
        //console.log(json_object);
      }
    }
  });
});

reader.readAsBinaryString(document.querySelector('input').files[0]);
The file is uploaded through an input in HTML.
Thanks in advance.
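(For reference, get_header_row isn't shown in the question; a common implementation reads the cells of the first row in the sheet's range, something like this sketch:)
function get_header_row(sheet) {
  var headers = [];
  var range = XLSX.utils.decode_range(sheet['!ref']);
  var R = range.s.r; // first row in the range

  for (var C = range.s.c; C <= range.e.c; ++C) {
    var cell = sheet[XLSX.utils.encode_cell({ c: C, r: R })];
    headers.push(cell ? XLSX.utils.format_cell(cell) : 'UNKNOWN ' + C);
  }
  return headers;
}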
Just pass a default value in the sheet_to_json method:
var jsonObj = XLSX.utils.sheet_to_json(data.Sheets[data.SheetNames[0]], {
  header: 0,
  defval: ""
});
The library has an option for that. In your code below:
...
// Here is your object
var XL_row_object = XLSX.utils.sheet_to_json(wb.Sheets[sheetName]);
//console.log(XL_row_object);
...
You should provide the option {defval: null} in the options argument, as follows:
...
// Here is your object
var XL_row_object = XLSX.utils.sheet_to_json(wb.Sheets[sheetName], {defval: null});
//console.log(XL_row_object);
...
Then, it should work.
Solution 1: comment out the condition if(h===undefined)continue; in xlsx.core.min.js.
Or, to do it properly...
Solution 2: pass an extra param when calling XLSX.utils.sheet_to_json(wb.Sheets[name], {blankCell: false}), and add the condition if(defval === undefined && blankCell) continue; (around line no. 19150 in xlsx.js).

Write a line into a .txt file with Node.js

I want to use Node.js to create a simple logging system which writes a line before the last line of a .txt file. However, I don't know how the file system functionality of Node.js works.
Can someone explain it?
Inserting data into the middle of a text file is not a simple task. If possible, you should append it to the end of your file.
The easiest way to append data to a text file is to use the built-in fs.appendFile(filename, data[, options], callback) function from the fs module:
var fs = require('fs')

fs.appendFile('log.txt', 'new data', function(err) {
  if (err) {
    // append failed
  } else {
    // done
  }
})
But if you want to write data to a log file several times, then it's best to use the fs.createWriteStream(path[, options]) function instead:
var fs = require('fs')

var logger = fs.createWriteStream('log.txt', {
  flags: 'a' // 'a' means appending (old data will be preserved)
})

logger.write('some data') // append string to your file
logger.write('more data') // again
logger.write('and more') // again
Node will keep appending new data to your file every time you call .write, until your application is closed, or until you manually close the stream by calling .end:
logger.end() // close stream
Note that logger.write in the above example does not write to a new line. To write data to a new line:
var writeLine = (line) => logger.write(`\n${line}`);
writeLine('Data written to a new line');
Simply use the fs module and something like this:
fs.appendFile('server.log', 'string to append', function(err) {
  if (err) return console.log(err);
  console.log('Appended!');
});
Step 1
If you have a small file, read all the file data into memory.
Step 2
Convert the file data string into an array of lines.
Step 3
Search the array to find the location where you want to insert the text.
Step 4
Once you have the location, insert your text:
yourArray.splice(index, 0, "new added text");
Step 5
Convert your array back to a string:
yourArray.join("");
Step 6
Write your file like so:
fs.writeFileSync('yourFile.txt', yourArray.join(""));
This is not advised if your file is too big. (See the full sketch below.)
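Putting the steps together, a minimal sketch (assuming a log.txt that fits in memory, and inserting before the last line as the question asks):
var fs = require('fs');

var lines = fs.readFileSync('log.txt', 'utf8').split('\n'); // Steps 1-2: read and split
var index = lines.length - 1;                               // Step 3: position before the last line
lines.splice(index, 0, 'new added text');                   // Step 4: insert the new line
fs.writeFileSync('log.txt', lines.join('\n'));              // Steps 5-6: join and write back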
I created a log file which prints data into a text file. The source code is below:
const { createLogger, format, transports } = require('winston'); // imported, but the file below is written with a plain write stream
var fs = require('fs')

var logger = fs.createWriteStream('Data Log.txt', {
  flags: 'a'
})

const os = require('os');
var sleep = require('system-sleep');
var endOfLine = require('os').EOL;

// column separators
var t = '    ';
var s = '  ';
var q = '  ';

var array1 = [];
var array2 = [];
var array3 = [];
var array4 = [];

// sample data values
array1[0] = 78;
array1[1] = 56;
array1[2] = 24;
array1[3] = 34;

// convert the numbers to strings
for (var n = 0; n < 4; n++) {
  array2[n] = array1[n].toString();
}

// space-filled buffers; each value is copied into one below
for (var k = 0; k < 4; k++) {
  array3[k] = Buffer.from('    ');
}

for (var a = 0; a < 4; a++) {
  array4[a] = Buffer.from(array2[a]);
}

for (var m = 0; m < 4; m++) {
  array4[m].copy(array3[m], 0);
}

// write the column headers
logger.write('Date' + q);
logger.write('Time' + (q + ' '))
logger.write('Data 01' + t);
logger.write('Data 02' + t);
logger.write('Data 03' + t);
logger.write('Data 04' + t)
logger.write(endOfLine);
logger.write(endOfLine);

function mydata() { // user defined function: writes one data row
  logger.write(datechar + s);
  logger.write(timechar + s);
  for (n = 0; n < 4; n++) {
    logger.write(array3[n]);
  }
  logger.write(endOfLine);
}

var now = new Date();
var dateFormat = require('dateformat');
var date = dateFormat(now, "isoDate");
var time = dateFormat(now, "h:MM:ss TT ");
var datechar = date.toString();
var timechar = time.toString();

mydata();
sleep(5 * 1000);

JSON to Excel conversion in Node.js

I'm trying to convert a large amount of JSON data to Excel and have tried a couple of modules.
Below are my findings; if anyone has used a better Node module which handles more data, please let me know so that I can explore it.
json2xls
A JSON array with 100000 entries took 402574 ms.
Once I exceeded 200000 it failed with this error: FATAL ERROR: CALL_AND_RETRY_2 Allocation failed - process out of memory
node-xls
A JSON array with 100000 entries took 444578 ms.
I tried this on a Windows 7 system with 8GB RAM, Intel Core i7, CPU @ 2.10GHz - 2.70GHz.
First push your data into a temporary array with the required columns and then convert it into xls. I have done it in the following manner:
// use the below package to convert json to xls
var json2xls = require('json2xls');
var fs = require('fs');
var jsonArray = [];

json.forEach(function(instance, indexx, record) {
  var tempArry = {
    'ColumnName1': record[indexx].columnNameValue,
    'ColumnName2': record[indexx].columnNameValue,
    'ColumnName3': record[indexx].columnNameValue,
    'ColumnName4': record[indexx].columnNameValue
  };
  jsonArray.push(tempArry);
});

// this code is for sorting the xls by the required value
jsonArray.sort(function(a, b) {
  return parseFloat(b.ColumnName4) - parseFloat(a.ColumnName4);
});

var xls = json2xls(jsonArray);
fs.writeFileSync('yourXLName.xlsx', xls, 'binary');
Don't try to add all the data into the Excel file; use only the specific columns you want in the saved file.
If it's a Node.js project then do this:
const xlsx = require("xlsx") // npm install xlsx
const fs = require("fs") // built-in module

var rawFile = fs.readFileSync("./datas.json") // dir of your json file as param
var raw = JSON.parse(rawFile)

var files = []
for (each in raw) {
  files.push(raw[each])
}

var obj = files.map((e) => {
  return e
})

var newWB = xlsx.book_new()
var newWS = xlsx.utils.json_to_sheet(obj)
xlsx.utils.book_append_sheet(newWB, newWS, "name") // workbook name as param
xlsx.writeFile(newWB, "Sample-Sales-Data.xlsx") // file name as param
In Ratul Das' answer, there is a typo on the following line:
var newWB = xlsx.book_new()
The code should read:
var newWB = xlsx.utils.book_new()
The snippet below is the code I use to generate an Excel spreadsheet from an array of JSON objects named imageList:
const workSheet = XLSX.utils.json_to_sheet(imageList);
const workBook = XLSX.utils.book_new();
XLSX.utils.book_append_sheet(workBook, workSheet, "Product Image Catalog");
// Generate buffer
XLSX.write(workBook, {bookType: 'xlsx', type: 'buffer'})
// Binary String
XLSX.write(workBook, {bookType: 'xlsx', type: 'binary'})
XLSX.writeFile(workBook, 'image-catalog.xlsx')
Building the buffer helps with large amounts of data.
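If you're serving the workbook over HTTP rather than writing it to disk, the buffer variant slots into an Express route. A sketch under assumptions: the /export path and Express setup are illustrative, and imageList is the array from the snippet above:
const express = require('express');
const XLSX = require('xlsx');

const app = express();

app.get('/export', (req, res) => {
  const workSheet = XLSX.utils.json_to_sheet(imageList); // imageList as in the snippet above
  const workBook = XLSX.utils.book_new();
  XLSX.utils.book_append_sheet(workBook, workSheet, "Product Image Catalog");

  // build the file in memory instead of writing it to disk
  const buffer = XLSX.write(workBook, { bookType: 'xlsx', type: 'buffer' });

  res.setHeader('Content-Disposition', 'attachment; filename="image-catalog.xlsx"');
  res.setHeader('Content-Type', 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet');
  res.send(buffer);
});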
If your JSON is already properly formatted, you just have to do:
const fs = require('fs');
const json2xls = require('json2xls');

// Example JSON
const json = [{ firstName: 'Bob', name: 'Lennon' }, { firstName: 'Jack', name: 'Sparrow' }];

const xls = json2xls(json);
fs.writeFileSync('exported.xlsx', xls, 'binary');
Works fine, and very simple.

Log Rotation in Node.js?

In my web analytics, I log the data to a plain text file. I want to rotate the log on a daily basis because it's logging too much data. Currently I am using bunyan to rotate the logs.
The problem I am facing:
It rotates the file correctly, but the rotated log files are named log.0, log.1, etc. I want the file names to be log.05-08-2013, log.04-08-2013, etc.
I can't edit the source of the bunyan package because we install the modules using package.json via NPM.
So my question is: is there any other log rotation mechanism in Node.js that meets my requirement?
Winston does support log rotation using a date in the file name. Take a look at this pull request, which adds the feature and was merged four months ago. Unfortunately the documentation isn't listed on the site, but there is another pull request pending to fix that. Based on that documentation, and the tests for the log rotation features, you should be able to just add it as a new transport to enable the log rotation functionality. Something like the following:
winston.add(winston.transports.DailyRotateFile, {
  filename: './logs/my.log',
  datePattern: '.dd-MM-yyyy'
});
If you also want to add logrotate (e.g. remove logs that are older than a week) in addition to saving logs by date, you can add the following code:
var fs = require('fs');
var path = require("path");
var CronJob = require('cron').CronJob;
var _ = require("lodash");
var logger = require("./logger");

// six-field pattern (with seconds): runs every day at 00:00:00 AM
var job = new CronJob('00 00 00 * * *', function() {
  fs.readdir(path.join("/var", "log", "ironbeast"), function(err, files) {
    if (err) {
      logger.error("error reading log files");
    } else {
      var currentTime = new Date();
      var weekInMs = 7 * 24 * 60 * 60 * 1000;
      _(files).forEach(function(file) {
        var fileDate = file.split(".")[2]; // get the date from the file name
        if (fileDate) {
          fileDate = fileDate.replace(/-/g, "/");
          var fileTime = new Date(fileDate);
          if ((currentTime - fileTime) > weekInMs) {
            console.log("delete file", file);
            fs.unlink(path.join("/var", "log", "ironbeast", file),
              function(err) {
                if (err) {
                  logger.error(err);
                }
                logger.info("deleted log file: " + file);
              });
          }
        }
      });
    }
  });
}, function() {
  // This function is executed when the job stops
  console.log("finished logrotate");
},
true, /* Start the job right now */
'Asia/Jerusalem' /* Time zone of this job. */
);
where my logger file is:
var path = require("path");
var winston = require('winston');

var logger = new winston.Logger({
  transports: [
    new winston.transports.DailyRotateFile({
      name: 'file#info',
      level: 'info',
      filename: path.join("/var", "log", "MY-APP-LOGS", "main.log"),
      datePattern: '.MM--dd-yyyy'
    }),
    new winston.transports.DailyRotateFile({
      name: 'file#error',
      level: 'error',
      filename: path.join("/var", "log", "MY-APP-LOGS", "error.log"),
      datePattern: '.MM--dd-yyyy',
      handleExceptions: true
    })
  ]
});

module.exports = logger;
There's the logrotator module for log rotation that you can use regardless of the logging mechanism.
You can specify the format option to control the date format of the rotated file names (or any other naming format, for that matter):
var logrotate = require('logrotator');

// use the global rotator
var rotator = logrotate.rotator;
// or create a new instance
// var rotator = logrotate.create();

// check file rotation every 5 minutes, and rotate the file if its size exceeds 10 mb.
// keep only 3 rotated files and compress (gzip) them.
rotator.register('/var/log/myfile.log', {
  schedule: '5m',
  size: '10m',
  compress: true,
  count: 3,
  format: function(index) {
    var d = new Date();
    return d.getDate() + "-" + d.getMonth() + "-" + d.getFullYear();
  }
});
mongodb
winston itself does not support log rotation. My bad.
mongodb has a log rotation use case; you can then export the logs to file names per your requirement.
winston also has a mongodb transport, but I don't think it supports log rotation out of the box, judging from its API.
This may be overkill, though.
forking bunyan
You can fork bunyan and add your repo's URL in package.json.
This is the easiest solution if you're fine with freezing bunyan's features or maintaining your own code.
As it is an open source project, you can even add your feature to it and submit a pull request to help improve bunyan.
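For illustration, pointing a package.json dependency at a fork looks roughly like this (the GitHub user and branch name here are placeholders):
{
  "dependencies": {
    "bunyan": "git+https://github.com/your-username/node-bunyan.git#my-rotation-patch"
  }
}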
