How to parse data from two CSV files in Node.js

I am very new to Node.js. I have a script that parses a CSV file and generates the required output file. Now I want to fetch some column data from another CSV at the same time and add those values to the output file.
Script:
var csv = require('csv');
var fs = require('fs');
var progress = require('progress-stream');
var date = require('date-and-time');
var indexStat = 0;
var header = [];
var headerLine = '$lang=en\n\nINSERT_UPDATE Customer;uid;name;address;phoneno';
var delimiter = ',';
var semicolon = ';';
var inputFile = __dirname + '/project/customer.csv';
var outputFile = __dirname + '/project/customer.impex';
var inputFileName = 'customer.csv';
var outputFileName = 'customer.impex';
function generateRecord(json) {
    var record = semicolon + json.uid + semicolon + json.name + semicolon + json.address;
    return record;
}
var writeStream = fs.createWriteStream(outputFile);
var parser = csv.parse({
    delimiter: delimiter
}, function (err, data) {
    if (err) {
        console.log(err);
    }
});
var transformer = csv.transform(function (data) {
    var line = '';
    if (indexStat == 0) {
        header = data;
        line = headerLine;
    } else {
        line = generateRecord(generateRecordObject(data));
    }
    indexStat++;
    writeStream.write(line + '\r\n');
});
function stringSplitter(dataRow) {
    var str = dataRow.toString();
    return str.split(delimiter);
}
function generateRecordObject(dataRow) {
    var record = {};
    dataRow.forEach(function (value, index) {
        if (header[index] != '') {
            record[header[index].toLowerCase()] = value;
        }
    });
    return record;
}
var stat = fs.statSync(inputFile);
var str = progress({
    length: stat.size,
    time: 100
});
str.on('progress', function (progress) {
    writeCompletedPercentageForRead(progress.percentage, progress.eta, progress.runtime, progress.speed);
});
function removeLineBreaks(obj) {
    obj = obj.replace(/\\N/g, '');
    // NOTE: this replace was garbled in the original post (it replaced '&' with '&');
    // presumably one side was the HTML entity '&amp;'
    obj = obj.replace(/&amp;/g, '&');
    return obj;
}
function writeCompletedPercentageForRead(p, e, r, s) {
    process.stdout.clearLine();
    process.stdout.cursorTo(0);
    process.stdout.write(`${inputFileName} read in progress to write ${outputFileName} ... Completed: ${parseInt(p, 10)} %, ETA: ${e} seconds, Elapsed: ${r} seconds, Rate: ${parseInt(s / 1024, 10)} KBps`);
}
fs.createReadStream(inputFile).pipe(str).pipe(parser).pipe(transformer);
customer.csv ->
uid,name,address
1234,manish,bangalore
The above script works fine and generates the customer.impex file below:
INSERT_UPDATE Customer;uid;name;address;phoneno
;1234;manish;bangalore
Now I want to populate phoneno as well, but the phoneno field is defined in another CSV file, let's say customer_phone.csv.
customer_phone.csv -
uid,phoneno
1234,98777767467
I want to match the uid column of customer.csv against customer_phone.csv and get the phoneno from that CSV. Finally, I want to add phoneno to the customer.impex output file.
I have no idea how to parse two CSVs here and generate the file. Any help?

var Converter = require("csvtojson").Converter;
var fs = require("fs");
var pathToMainCSV = "./customer.csv";
var pathToContactsCSV = "./contact.csv";
var customerConverter = new Converter({});
var contactConverter = new Converter({});
var containerObj = {};

function processContacts() {
    fs.createReadStream(pathToContactsCSV).pipe(contactConverter);
}
function createImpexFile() {
    var headerLine = '$lang=en\n\nINSERT_UPDATE Customer;uid;name;address;phoneno;\n';
    Object.keys(containerObj).forEach(obj => {
        Object.keys(containerObj[obj]).forEach(data => {
            headerLine += containerObj[obj][data] + ';';
        });
        headerLine += '\n';
    });
    fs.writeFile("./new.impex", headerLine, function (err) {
        if (err) {
            return console.log(err);
        }
        console.log("The file was saved!");
    });
}
customerConverter.on("end_parsed", function (jsonArray) {
    jsonArray.forEach(v => {
        containerObj[v.uid] = v;
    });
    processContacts();
});

contactConverter.on("end_parsed", function (jsonArray) {
    jsonArray.forEach(v => {
        // this becomes the phoneno column in the impex output; guard against
        // phone records whose uid has no matching customer row
        if (containerObj[v.uid]) {
            containerObj[v.uid].contact = v.phoneno;
        }
    });
    createImpexFile();
});

fs.createReadStream(pathToMainCSV).pipe(customerConverter);
Kindly use something like what I have done above and format the string according to your needs.
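For the sample data above, the generated new.impex should look roughly like this (assuming every phone record's uid has a matching customer row):
$lang=en

INSERT_UPDATE Customer;uid;name;address;phoneno;
1234;manish;bangalore;98777767467;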

Related

Parse Excel file and create JSON format with exceljs on Node.js

I have this Excel file, and I need to convert its data to JSON like below:
[
{
"no_pemohonan": "PNL-202109200826210023105",
"sumber_data": "HOSTS",
"tgl_permohonan": "2021-09-20",
},
{
"no_pemohonan": "PNL-202109200845131363376",
"sumber_data": "HOSTS",
"tgl_permohonan": "2021-09-20",
},
...
]
I could produce the data with the code below, but I have to set the object keys manually:
const excel = require('exceljs');
const workbook = new excel.Workbook();
await workbook.xlsx.load(objDescExcel.buffer);
let json = workbook.model;
let worksheetsArr = json.worksheets.length;
const arrRow = [];
const arrIdPembatalan = [];
// loop over every worksheet
for (let index = 0; index < worksheetsArr; index++) {
    let worksheet = workbook.worksheets[index];
    // console.log("worksheet " + worksheet);
    const rowlast = worksheet.lastRow;
    // loop over every row
    worksheet.eachRow(async (row, rowNumber) => {
        let new_row = row.values;
        // console.log(new_row);
        let no_permohonan = new_row[2];
        let sumber_data = new_row[3];
        let tgl_permohonan = new_row[4];
        let header = {
            no_permohonan: no_permohonan,
            sumber_data: sumber_data,
            tgl_permohonan: tgl_permohonan,
        };
        arrIdPembatalan.push(header);
    });
}
I want to set the headers automatically, without having to set them again as in the syntax above.
I have seen this solution, but it was written with the xlsx library, while I am using exceljs.
Here is a Node.js implementation.
(async function () {
    const excel = require('exceljs');
    const workbook = new excel.Workbook();
    // use readFile for testing purposes
    // await workbook.xlsx.load(objDescExcel.buffer);
    await workbook.xlsx.readFile(process.argv[2]);
    let jsonData = [];
    workbook.worksheets.forEach(function (sheet) {
        // read first row as data keys
        let firstRow = sheet.getRow(1);
        if (!firstRow.cellCount) return;
        let keys = firstRow.values;
        sheet.eachRow((row, rowNumber) => {
            if (rowNumber == 1) return;
            let values = row.values;
            let obj = {};
            for (let i = 1; i < keys.length; i++) {
                obj[keys[i]] = values[i];
            }
            jsonData.push(obj);
        });
    });
    console.log(jsonData);
})();
test result
$ node ./test.js ~/Documents/Book1.xlsx
[
{
no_pemohonan: 'PNL-202109200826210023105',
sumber_data: 'HOSTS',
tgl_permohonan: 2021-09-20T00:00:00.000Z
},
{
no_pemohonan: 'PNL-202109200845131363376',
sumber_data: 'HOSTS',
tgl_permohonan: 2021-09-20T00:00:00.000Z
}
]
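Note that exceljs returns date cells as JavaScript Date objects (see tgl_permohonan in the output above). If you need plain 'YYYY-MM-DD' strings as in the desired JSON, a minimal tweak to the copy loop above would be:
for (let i = 1; i < keys.length; i++) {
    let value = values[i];
    // convert Date cells to a YYYY-MM-DD string; leave other cells unchanged
    obj[keys[i]] = value instanceof Date ? value.toISOString().slice(0, 10) : value;
}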
If dealing with large files, I would explore stream processing using the following libraries:
Use exceljs to read .xlsx file as stream and write to .csv as stream:
// read from a stream
const readStream = fs.createReadStream('largeWorkbook.xlsx');
const workbook = new Excel.Workbook();
await workbook.xlsx.read(readStream);
// write to stream
const writeStream = fs.createWriteStream('largeWorksheet.csv');
await workbook.csv.write(writeStream, { sheetName: 'Page name' });
Then use csvtojson to transform CSV to JSON:
import csvToJson from 'csvtojson'
const readStream = fs.createReadStream('largeWorksheet.csv')
const writeStream = fs.createWriteStream('largeWorksheet.json')
readStream
    .pipe(csvToJson())
    .pipe(writeStream)
This will work for large files even on hardware with low memory.
Full code snippet:
import fs from 'fs'
import Excel from 'exceljs'
import csvToJson from 'csvtojson'
const xlsxRead = fs.createReadStream('largeWorkbook.xlsx')
const csvWrite = fs.createWriteStream('largeWorksheet.csv')
const csvRead = () => fs.createReadStream('largeWorksheet.csv')
const jsonWrite = fs.createWriteStream('largeWorksheet.json')

// the leading ';' is needed in semicolon-less style: without it the '(' would
// be parsed as a call of the createWriteStream result on the previous line
;(async function process() {
    const workbook = new Excel.Workbook()
    await workbook.xlsx.read(xlsxRead)
    await workbook.csv.write(csvWrite, { sheetName: 'Worksheet Name' })
    csvRead()
        .pipe(csvToJson())
        .pipe(jsonWrite)
})() // this immediately-invoked wrapper function is just for Node.js runtimes
     // that don't support top-level await yet;
     // if running via `--esm` or from an `.mjs` file, it can be omitted
var Excel = require('exceljs');
var q = require('q'); // promise library used for the deferreds below
var logger = console; // stand-in; the original code used an external logger

var ReadExcelCSV = function (fileType, fileName, filePath, delimeter, textQualifier) {
    var deffered = q.defer();
    var workbook = new Excel.Workbook();
    var finalFilePath = filePath + fileName;
    if (fileType == "excel") {
        console.log("File Type: Excel");
        workbook.xlsx.readFile(finalFilePath).then(function () {
            ParseExcelCSVFile(workbook).then(function (resp) {
                deffered.resolve(resp);
            }, function (error) {
                logger.info("Error in Parsing Excel/CSV");
            });
        }, function (err) {
            logger.info("Error In Read Excel: " + JSON.stringify(err));
        });
    } else {
        if (delimeter != undefined && textQualifier != undefined) {
            var options = {};
            options.delimiter = delimeter;
            options.quote = textQualifier;
            options.dateFormats = [];
            workbook.csv.readFile(finalFilePath, options).then(function () {
                ParseExcelCSVFile(workbook).then(function (resp) {
                    // fs.unlink(finalFilePath);
                    deffered.resolve(resp);
                }, function (error) {
                    logger.info("Error in Parsing Excel/CSV");
                    deffered.reject(error);
                });
            }, function (error) {
                logger.info("Error In Read CSV: " + JSON.stringify(error));
                deffered.reject(error);
            });
        } else {
            workbook.csv.readFile(finalFilePath).then(function () {
                ParseExcelCSVFile(workbook).then(function (resp) {
                    deffered.resolve(resp);
                }, function (error) {
                    logger.info("Error in Parsing Excel/CSV");
                    deffered.reject(error);
                });
            }, function (error) {
                logger.info("Error In Read CSV: " + JSON.stringify(error));
                deffered.reject(error);
            });
        }
    }
    return deffered.promise;
};
var ParseExcelCSVFile = function (workbook) {
    try {
        var deffered = q.defer();
        var objresult = [];
        var objheaders = [];
        var worksheet = workbook.getWorksheet(1);
        worksheet.eachRow(function (row, rowNumber) {
            var currentobj = {};
            row.eachCell({
                includeEmpty: true
            }, function (cell, colNumber) {
                if (rowNumber == 1) {
                    objheaders.push(cell.value);
                } else {
                    currentobj[objheaders[colNumber - 1]] = cell.value == null ? '' : cell.value;
                }
            });
            if (rowNumber != 1) {
                objresult.push(currentobj);
            }
        });
        deffered.resolve(objresult);
        return deffered.promise;
    } catch (ex) {
        logger.error("Error in ParseExcel: " + ex.stack);
    }
};
I wrote this code quite a long time ago, so you will see an old pattern like deffered, which you can change easily, but it will help with what you are trying to achieve. It can read and parse both Excel and CSV files.
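If you would rather drop the q dependency, here is a minimal sketch of the same flow with native Promises (readExcelCSV is a hypothetical rewrite; it assumes ParseExcelCSVFile keeps returning a promise, and uses the promise exceljs's readFile() already returns instead of a deferred):
var Excel = require('exceljs');

function readExcelCSV(fileType, fileName, filePath, delimiter, textQualifier) {
    var workbook = new Excel.Workbook();
    var finalFilePath = filePath + fileName;
    // pass CSV options only when both are supplied, mirroring the original branches
    var load = fileType === "excel"
        ? workbook.xlsx.readFile(finalFilePath)
        : (delimiter !== undefined && textQualifier !== undefined
            ? workbook.csv.readFile(finalFilePath, { delimiter: delimiter, quote: textQualifier, dateFormats: [] })
            : workbook.csv.readFile(finalFilePath));
    return load.then(function () {
        return ParseExcelCSVFile(workbook);
    });
}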

Synchronous Node.js batch job

I'm trying to write a batch script that will:
1. Read XMLs from a directory
2. Parse each XML and find a value to use for a DB (database) lookup
3. Use the parsed value to look up additional metadata in the DB
4. Populate the XML with the metadata retrieved from the DB lookup (step 3)
5. Write the updated XML to a complete directory
6. Close the DB connection
The issue I'm running into is that I cannot control the code execution order, so I cannot close the DB connection at the end of the script. If I attempt to close the connection, I get a 'connection undefined' error. Below is my code for reference. Is there a good way to accomplish something like this in Node.js, or should I look at doing this in Java or some other language?
'use strict';
let fs = require('fs');
let xml2js = require('xml2js');
const oracledb = require('oracledb');
const dbConfig = require('./dbconfig.js');
function pad(number, length) {
    var str = '' + number;
    while (str.length < length) {
        str = '0' + str;
    }
    return str;
}
async function run() {
    try {
        // Get a non-pooled connection
        let connection;
        if (!connection) {
            connection = await oracledb.getConnection(dbConfig);
            console.log('Connection was successful!');
        }
        let directory = "EDI_XMLS";
        let dirBuf = Buffer.from(directory);
        // var f = 0;
        let files = fs.readdirSync(directory);
        console.log(files);
        for (let f = 0; f < files.length; f++) {
            let parser = new xml2js.Parser();
            var xml_json_data = "";
            // read the file
            await fs.readFile(directory + "/" + files[f], async function (err, data) {
                // parse the file
                await parser.parseString(data, async function (err, result) {
                    let results;
                    var line_count = result.page.GLLines[0].GLLine.length;
                    console.dir('Invoice: ' + result.page.InvoiceNumber[0]);
                    console.dir('Line Total: ' + line_count);
                    console.log('File: ' + f);
                    try { // Lookup Data
                        results = await connection.execute('SELECT BUSINESS_UNIT, OPERATING_UNIT, DEPTID, PRODUCT, a.effdt FROM SYSADM.PS_A_NSS_SHPTO_ACC#FDEV a where (a.a_ship_to_customer = :shipTo) order by a.effdt desc', [pad(result.page.VoucherDescription[0], 10)], {
                            maxRows: 2
                        });
                        console.log(results.metaData);
                        console.log(results.rows);
                    } catch (err) {
                        console.error(err);
                    }
                    for (let i = 0; i < line_count; i++) { // Populate data
                        result.page.GLLines[0].GLLine[i].GLBU[0] = results.rows[0][0];
                        result.page.GLLines[0].GLLine[i].OpUnit[0] = results.rows[0][1];
                        result.page.GLLines[0].GLLine[i].Department[0] = results.rows[0][2];
                        result.page.GLLines[0].GLLine[i].Product[0] = results.rows[0][3];
                    }
                    // Write to File
                    var builder = new xml2js.Builder();
                    var xml = builder.buildObject(result);
                    await fs.writeFile("complete/" + files[f], xml, function (err, data) {
                        if (err) console.log(err);
                        console.log("successfully written our updated xml to file");
                        console.dir('BUs: ' + JSON.stringify(result.page));
                    }); // end write
                }); // end parser
            }); // end readfile
            console.log('End');
        } // end for
    } catch (err) {
        console.error(err);
    } finally {
        await connection.close();
        console.log('Finally Done');
    }
}
run();
console.log('completely Done');
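For reference, a sketch of how the same flow could be kept strictly sequential with promise-based APIs, so that connection.close() only runs after every file is processed. This assumes xml2js's parseStringPromise() and the fs.promises API; the DB lookup and metadata population are elided:
'use strict';
const fsp = require('fs').promises;
const xml2js = require('xml2js');
const oracledb = require('oracledb');
const dbConfig = require('./dbconfig.js');

async function run() {
    let connection; // declared outside try/finally so finally can see it
    try {
        connection = await oracledb.getConnection(dbConfig);
        const directory = 'EDI_XMLS';
        for (const file of await fsp.readdir(directory)) {
            const data = await fsp.readFile(directory + '/' + file);
            // awaiting a promise here keeps the loop sequential, unlike the
            // callback-style fs.readFile/parseString calls above
            const result = await new xml2js.Parser().parseStringPromise(data);
            // ... DB lookup and metadata population as in the original ...
            const xml = new xml2js.Builder().buildObject(result);
            await fsp.writeFile('complete/' + file, xml);
        }
    } finally {
        if (connection) {
            await connection.close(); // runs only after the loop has finished
            console.log('Finally Done');
        }
    }
}
run();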

Why doesn't my async function return any result?

I wrote this small program to fetch data. This, however, happens asynchronously. Since I nonetheless need to use the function holeVertretung(kzl) from another module, I'd like it to have a return value that I can eventually pass on.
Excuse my spaghetti code (I usually prettify the code when I am done with my task ...).
Credentials are stored in a file and are therefore not found in this file.
I'd like to end up with "vertreter" as a return value.
Thank you in advance.
const node = require("deasync");
const DSB = require('dsbapi');
const tabletojson = require('tabletojson');
const https = require('https');
const cred = require("./vertrCred");
const dsb = new DSB(cred["dsb"]["user"], cred["dsb"]["passw"]); //Sanitized - no Credentials here
//Stackoverflow 2332811
String.prototype.capitalize = function (lower) {
    return (lower ? this.toLowerCase() : this).replace(/(?:^|\s)\S/g, function (a) { return a.toUpperCase(); });
};
function holePlan(kuerzel) {
    dsb.fetch()
        .then(data => {
            const timetables = DSB.findMethodInData('timetable', data);
            const tiles = DSB.findMethodInData('tiles', data);
            var tilesStr = JSON.stringify(tiles["data"][0]["url"]);
            var url = JSON.parse(tilesStr);
            https.get(url, (resp) => {
                let data = '';
                resp.on('data', (chunk) => {
                    data += chunk;
                });
                resp.on('end', () => {
                    var tableasjson = tabletojson.convert(data);
                    var erstetab = tableasjson[0];
                    var zweitetab = tableasjson[1];
                    var drittetab = tableasjson[2];
                    var viertetab = tableasjson[3];
                    var fuenftetab = tableasjson[4];
                    var sechstetab = tableasjson[5];
                    var siebtetab = tableasjson[6];
                    var achtetab = tableasjson[7];
                    if (typeof kuerzel === "undefined") {
                        var regenechse = '(Aaa|Aaa[A-Za-z?]|[A-Za-z?]Aaa)';
                    } else {
                        var name = kuerzel.capitalize(true);
                        var regenechse = '(' + name + '|' + name + '[A-Za-z?]|[A-Za-z?]' + name + ')';
                    }
                    const regex = new RegExp(regenechse, 'g');
                    var sammel = Object.assign(drittetab, fuenftetab);
                    var z = 0;
                    var vertreter = {};
                    var y = JSON.parse(JSON.stringify(sammel));
                    for (var i = 0; i < y.length; i++) {
                        if (typeof y[i].Vertreter != 'undefined') {
                            if (y[i].Vertreter.match(regex)) {
                                z += 1;
                                vertreter[z] = y[i];
                            }
                        }
                    }
                    if (z == 0) {
                        // console.log("Es gibt nichts zu vertreten");
                    } else {
                        // console.log("Es werden " + z + " Stunden vertreten");
                        return (vertreter);
                    }
                });
            });
        })
        .catch(e => {
            // An error occurred :(
            console.log(e);
        });
}
// Stack Overflow
function warte(promise) {
    var done = 0;
    var result = null;
    promise.then(
        function (value) {
            done = 1;
            result = value;
            return (value);
        },
        function (reason) {
            done = 1;
            throw reason;
        }
    );
    while (!done)
        node.runLoopOnce();
    return (result);
}
function holeVertretung(kzl) {
    var aufgabe = new Promise((resolve, reject) => {
        setTimeout(resolve, 1000, holePlan(kzl));
    });
    var ergebnis = warte(aufgabe);
    if (typeof ergebnis === "undefined") {
        console.log("Mist");
    } else {
        console.log(ergebnis);
    }
    return ergebnis;
}
holeVertretung("Aaa");
That's not the right way to work with promises. If you write such an infinite loop, it defeats the whole purpose of using promises. Instead, return the value from the promise and use async/await like this:
function warte(promise) {
    var done = 0;
    var result = null;
    return promise.then(
    ...
}
async function holeVertretung(kzl) {
    var aufgabe = new Promise((resolve, reject) => {
        setTimeout(resolve, 1000, holePlan(kzl));
    });
    var ergebnis = await warte(aufgabe);
    ...
If async/await does not work for some reason, use a .then() clause:
warte(aufgabe).then(value => {
    var ergebnis = value;
});
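The underlying problem, though, is that holePlan itself never returns anything: the vertreter object is returned from inside the 'end' handler and discarded. A minimal sketch of how holePlan could resolve with its result, wrapping the https response in a Promise (the table filtering stays as in the question, indicated by the comment):
function holePlan(kuerzel) {
    return dsb.fetch().then(data => {
        const tiles = DSB.findMethodInData('tiles', data);
        const url = JSON.parse(JSON.stringify(tiles["data"][0]["url"]));
        return new Promise((resolve, reject) => {
            https.get(url, resp => {
                let body = '';
                resp.on('data', chunk => body += chunk);
                resp.on('end', () => {
                    // ... filter the converted tables and build vertreter as before ...
                    resolve(vertreter);
                });
            }).on('error', reject);
        });
    });
}

// Callers can then simply do:
// holePlan("Aaa").then(vertreter => console.log(vertreter));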

Node.js streams: writable stream 'drain' event not firing

I am trying to read in a large file, do some computation, and then write to a much bigger file. To prevent excessive memory consumption, I am using streams. The problem I am facing is that the write stream is not firing the "drain" event, which signals that the writes have been flushed to disk. In order to prevent back-pressure, I am waiting for the drain event to fire before I start writing to the buffer again. While debugging I found that after a .write() call returns false and the line fvfileStream.once('drain', test) is executed, the program just stops and does not do anything.
Here is the code:
var fs = require('fs');

// a test function I created to see if the callback is called after drain
var test = function () {
    console.log("Done Draining");
};
fs.readFile('/another/file/to/be/read', {
    encoding: "utf8"
}, function (err, data) {
    if (err) throw err;
    // Make an array containing tags.
    var tags = data.split('\n');
    // Create a write stream.
    var fvfileStream = fs.createWriteStream('/path/TagFeatureVectors.csv');
    // Read in the question posts.
    var qfileStream = fs.createReadStream('/Big/file/QuestionsWithTags.csv', {
        encoding: "utf8"
    });
    var partialRow = null;
    var writable = true;
    var count = 0;
    var doRead = function () {
        var qData = qfileStream.read();
        var questions = qData.split('\n');
        if (partialRow != null) {
            questions[0] = partialRow + questions[0];
            partialRow = null;
        }
        var lastRow = questions[questions.length - 1];
        if (lastRow.charAt(lastRow.length - 1) != '\n') {
            partialRow = lastRow;
        }
        questions.forEach(function (row, index, array) {
            count++;
            var fields = row.split(',');
            console.log("Processing question number: " + count + " id: " + fields[0]);
            var tagString = fields[1];
            var regex = new RegExp(/<([^>]+)>/g);
            tags.forEach(function (tag, index, array) {
                var found = false;
                var questionTags;
                while ((questionTags = regex.exec(tagString)) != null) {
                    var currentTag = questionTags[1];
                    if (currentTag === tag) {
                        found = true;
                        break;
                    }
                }
                // This is where the write stream is written to.
                if (found) {
                    writable = fvfileStream.write("1,", "utf8");
                } else {
                    writable = fvfileStream.write("0,", "utf8");
                }
            });
        });
        fvfileStream.write("\n");
    };
    qfileStream.on('readable', function () {
        if (writable) {
            doRead();
        } else {
            // Waiting for drain event.
            fvfileStream.once('drain', test);
        }
    });
    qfileStream.on('end', function () {
        fvfileStream.end();
    });
});
Updated: Based on advice provided by @loganfsmyth, I implemented transform streams but still ran into the same issue. Here is my updated code:
var fs = require('fs');
var stream = require('stream');
var util = require('util');
var Transform = stream.Transform;

function FVCreator(options) {
    // allow use without new
    if (!(this instanceof FVCreator)) {
        return new FVCreator(options);
    }
    // init Transform
    Transform.call(this, options);
}
util.inherits(FVCreator, Transform);

var partialRow = null;
var count = 0;
var tags;

FVCreator.prototype._transform = function (chunk, enc, cb) {
    var that = this;
    var questions = chunk.toString().split('\n');
    if (partialRow != null) {
        questions[0] = partialRow + questions[0];
        partialRow = null;
    }
    var lastRow = questions[questions.length - 1];
    if (lastRow.charAt(lastRow.length - 1) != '\n') {
        partialRow = lastRow;
        questions.splice(questions.length - 1, 1);
    }
    questions.forEach(function (row, index, array) {
        count++;
        var fields = row.split(',');
        console.log("Processing question number: " + count + " id: " + fields[0]);
        var tagString = fields[1];
        var regex = new RegExp(/<([^>]+)>/g);
        tags.forEach(function (tag, index, array) {
            var found = false;
            var questionTags;
            while ((questionTags = regex.exec(tagString)) != null) {
                var currentTag = questionTags[1];
                if (currentTag === tag) {
                    found = true;
                    break;
                }
            }
            if (found) {
                that.push("1,", "utf8");
            } else {
                that.push("0,", "utf8");
            }
        });
    });
    this.push("\n", "utf8");
    cb();
};

fs.readFile('/another/file/to/be/read', {
    encoding: "utf8"
}, function (err, data) {
    if (err) throw err;
    // Make an array containing tags.
    tags = data.split('\n');
    // Write to a file.
    var fvfileStream = fs.createWriteStream('/path/TagFeatureVectors.csv');
    // Read in the question posts.
    var qfileStream = fs.createReadStream('/large/file/to/be/read', {
        encoding: "utf8"
    });
    var fvc = new FVCreator();
    qfileStream.pipe(fvc).pipe(fvfileStream);
});
I am running this on OSX Yosemite.
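For reference, the canonical back-pressure pattern with a 'data' listener is to pause the readable when write() returns false and resume it on 'drain'. A minimal sketch with placeholder file paths:
var fs = require('fs');
var rs = fs.createReadStream('/big/input.csv', { encoding: 'utf8' });
var ws = fs.createWriteStream('/path/output.csv');

rs.on('data', function (chunk) {
    // write() returns false when the internal buffer is full
    if (!ws.write(chunk)) {
        rs.pause();                    // stop reading until the buffer flushes
        ws.once('drain', function () { // 'drain' fires once it has flushed
            rs.resume();
        });
    }
});
rs.on('end', function () {
    ws.end();
});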

Read the last line of a CSV file and extract one value

New to Node.js and trying to pull a value from the very last line of a CSV file. Here is the CSV:
Unit ID,Date,Time,Audio File
Log File Created,3/6/2013,11:18:25 AM,file:\\\C:\Users\Ben\Documents\1_03-06-2013_1114-50.mp3
1,3/6/2013,11:20:24 AM,file:\\\C:\AlertLog\1_03-06-2013_1120-24.mp3
1,3/6/2013,11:20:39 AM,file:\\\C:\AlertLog\1_03-06-2013_1120-24.mp3
The part I am trying to grab is file:\\\C:\AlertLog\1_03-06-2013_1120-24.mp3, preferably getting rid of the file:\\\ part.
Sorry that I do not have any code to show; I have just a few hours of experience with Node.js and cannot seem to find any docs on how to accomplish something like this. Any help would be appreciated. Thanks!
Regular file
Read the file like a regular file: split the file contents into lines, take the last line, split it by commas, and take the last field.
var fs = require('fs'); // file system module

fs.readFile('/path/to/file.csv', 'utf-8', function (err, data) {
    if (err) throw err;
    var lines = data.trim().split('\n');
    var lastLine = lines.slice(-1)[0];
    var fields = lastLine.split(',');
    // strip the leading "file:\\\" prefix (a regex handles any number of backslashes)
    var audioFile = fields.slice(-1)[0].replace(/^file:\\+/, '');
    console.log(audioFile);
});
File System module documentation
CSV parser
You can also use the node-csv-parser module.
var fs = require('fs');
var csv = require('csv');

csv()
    .from.stream(fs.createReadStream('/path/to/file.csv'))
    .to.array(function (data, count) {
        var lastLine = data.slice(-1)[0];
        var audioFile = lastLine.slice(-1)[0].replace(/^file:\\+/, '');
        console.log(audioFile);
    });
I did it by reading the file backwards until the last line was read:
Update 2022 (uses modern JavaScript and reads the buffer only once):
import { open } from 'node:fs/promises';
import path from 'node:path';
import { fileURLToPath } from 'url';

const __dirname = path.dirname(fileURLToPath(import.meta.url));
const fileName = path.join(__dirname, 'test.txt');

async function readLastLine(name) {
    var line = '';
    var chunkSize = 200; // how many characters to read from the end of the file
    const fd = await open(name);
    const st = await fd.stat();
    const buf = Buffer.alloc(chunkSize);
    const len = buf.length;
    const { bytesRead, buffer } = await fd.read(buf, 0, len, st.size - len);
    for (let i = len - 1; i > -1; i--) {
        const isEol = buffer[i] === 0x0a; // 0x0a == '\n'
        const isCtrl = buffer[i] < 0x20;  // 0-31 are ASCII control characters
        if (isEol && line.length > 0) {
            break;
        } else if (!isCtrl && !isEol) {
            line = String.fromCharCode(buffer[i]) + line;
        }
    }
    fd.close();
    return line;
}

try {
    const line = await readLastLine(fileName);
    console.log(line);
} catch (err) {
    console.error(err);
}
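Note that this reads only the final chunkSize bytes, so a last line longer than 200 characters would come back truncated, and a file shorter than chunkSize would make st.size - len negative; increase chunkSize, or read backwards in chunks, if either case can occur.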
Old 2014 answer
var fs = require('fs');
var path = require('path');
var fileName = path.join(__dirname, 'test.txt');

var readLastLine = function (name, callback) {
    fs.stat(name, function (err, stat) {
        fs.open(name, 'r', function (err, fd) {
            if (err) throw err;
            var i = 0;
            var line = '';
            var readPrevious = function (buf) {
                fs.read(fd, buf, 0, buf.length, stat.size - buf.length - i, function (err, bytesRead, buffer) {
                    if (err) throw err;
                    line = String.fromCharCode(buffer[0]) + line;
                    if (buffer[0] === 0x0a) { // 0x0a == '\n'
                        callback(line);
                    } else {
                        i++;
                        // note: new Buffer() is deprecated; use Buffer.alloc(1) on modern Node.js
                        readPrevious(new Buffer(1));
                    }
                });
            };
            readPrevious(new Buffer(1));
        });
    });
};

readLastLine(fileName, function (line) {
    console.log(line);
});
