How to remove one line from a txt file - node.js

I have the following text file ("test.txt") that I want to manipulate in node.js:
world
food
I want to remove the first line so that food becomes the first line instead. How can I do that?

var fs = require('fs');

fs.readFile(filename, 'utf8', function (err, data) {
    if (err) {
        // check and handle err
    }
    // data is the file contents as a single unified string
    // .split('\n') splits it at each newline character, producing an array of lines
    // .slice(1) returns a new array starting at index 1, i.e. everything except the first line
    // .join('\n') re-concatenates that array into a single string
    var linesExceptFirst = data.split('\n').slice(1).join('\n');
    fs.writeFile(filename, linesExceptFirst, function (err) {
        if (err) { /* check and handle err */ }
    });
});
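To make the transformation concrete, here is what each step produces for the two-line file from the question:
// 'world\nfood'.split('\n')          -> ['world', 'food']
// 'world\nfood'.split('\n').slice(1) -> ['food']
// ['food'].join('\n')                -> 'food'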

I just came across the need to be able to exclude several lines in a file. Here's how I did it with a simple node function.
const fs = require('fs');

const removeLines = (data, lines = []) => {
    return data
        .split('\n')
        .filter((val, idx) => lines.indexOf(idx) === -1)
        .join('\n');
};

fs.readFile(fileName, 'utf8', (err, data) => {
    if (err) throw err;
    // remove the first line and the 5th and 6th lines in the file
    fs.writeFile(fileName, removeLines(data, [0, 4, 5]), 'utf8', function (err) {
        if (err) throw err;
        console.log("the lines have been removed.");
    });
});
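If the list of line indices to drop is long, a Set makes each lookup constant-time; a minor variation on the same function:
const removeLinesSet = (data, lines = []) => {
    const skip = new Set(lines);
    return data
        .split('\n')
        .filter((val, idx) => !skip.has(idx))
        .join('\n');
};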

Use replace:
const fs = require('fs');

function readWriteSync() {
    var data = fs.readFileSync(filepath, 'utf-8');
    // replace 'world' together with its trailing newline character with the empty string
    var newValue = data.replace(/world\n/, '');
    fs.writeFileSync(filepath, newValue, 'utf-8');
}
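Note that without a flag the regular expression only replaces the first match. To remove every matching line, add the global flag:
var newValue = data.replace(/world\n/g, '');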

Related

Find string and delete line - Node.JS
var fs = require('fs');

fs.readFile('shuffle.txt', {encoding: 'utf-8'}, function (err, data) {
    if (err) throw err;
    let dataArray = data.split('\n'); // convert file data into an array of lines
    const searchKeyword = 'user1';    // we are looking for a line containing the keyword 'user1'
    let lastIndex = -1;               // assume the keyword has not been found yet
    for (let index = 0; index < dataArray.length; index++) {
        if (dataArray[index].includes(searchKeyword)) { // check if this line contains the keyword
            lastIndex = index; // found a line that includes 'user1'
            break;
        }
    }
    if (lastIndex !== -1) {
        dataArray.splice(lastIndex, 1); // remove the line containing 'user1' from the array
    }
    // update the file with the new data;
    // this removes the line containing 'user1' from your shuffle.txt file
    const updatedData = dataArray.join('\n');
    fs.writeFile('shuffle.txt', updatedData, (err) => {
        if (err) throw err;
        console.log('Successfully updated the file data');
    });
});
This link explains how to find a string and delete the line containing it, but it only deletes one user1 line at a time. I have many lines with user1; how can I delete all of them:
john
doe
some keyword
user1
last word
user1
user1
Also, the opposite: how can I delete all the lines except the user1 lines?
I would make use of the Array.filter() function.
Basically, you call filter() on an array, and define a callback function that checks every item of that array.
If the checking function returns true for a particular item, keep that item - put it in a new array
If the checking function returns false, do not put the item in the new array
So, in your case, once you have read all the lines into an array (the data.split('\n') step in your code), use the filter function:
// Delete all instances of user1
let newDataArray = dataArray.filter(line => line !== "user1")
// Delete everything except user1
let newDataArray = dataArray.filter(line => line === "user1")
// Delete any lines that have the text 'user1' somewhere inside them
let newDataArray = dataArray.filter(line => !line.includes("user1"))
Then, just like you have done in your code, use the join() function on newDataArray and write the file.
To rewrite your code:
var fs = require('fs');

fs.readFile('shuffle.txt', {encoding: 'utf-8'}, function (err, data) {
    if (err) throw err;
    let dataArray = data.split('\n'); // convert file data into an array of lines
    const searchKeyword = 'user1';    // the keyword we are looking for
    // Delete all instances of user1
    let newDataArray = dataArray.filter(line => line !== searchKeyword);
    // update the file with the new data
    const updatedData = newDataArray.join('\n');
    fs.writeFile('shuffle.txt', updatedData, (err) => {
        if (err) throw err;
        console.log('Successfully updated the file data');
    });
});
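And for the opposite requirement (keep only the user1 lines), flip the comparison in the same filter call:
let onlyUser1 = dataArray.filter(line => line === searchKeyword);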

How to copy file content into a string?

I am trying to "translate" my old Ruby scripts to node.js. One of them does CSV parsing, and I am stuck at step one: loading the file into a string.
This prints content of my file to the console:
fs = require('fs');
fs.readFile("2015 - Best of.csv", 'utf8', function (err, data) {
    if (err) {
        return console.log(err);
    }
    console.log(data);
});
but for some reason I can't catch data into variable:
fs = require('fs');
var x = "";
fs.readFile("2015 - Best of.csv", 'utf8', function (err, data) {
    if (err) {
        return console.log(err);
    }
    x = x + data;
});
console.log(x);
How do I store (function's) 'data' variable into (global) 'x' variable?
It is working.
The problem is that you are logging x before it gets filled. Since fs.readFile is asynchronous, x will only contain the file contents inside the callback, once the read has completed.
You can also look at the fs.readFileSync function.
However, I would recommend you get more comfortable with node's async features.
Try this:
fs = require('fs');
var x = "";
fs.readFile("2015 - Best of.csv", 'utf8', function (err, data) {
    if (err) {
        return console.log(err);
    }
    x = x + data;
    console.log(x);
});
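For completeness, the synchronous variant mentioned above. It blocks the event loop, so it is fine in a one-off script but best avoided in a server:
fs = require('fs');
// readFileSync returns the whole file as a string when an encoding is passed
var x = fs.readFileSync("2015 - Best of.csv", 'utf8');
console.log(x);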

Read a file one character at a time in node.js?

Is there a way in node.js to read one character at a time from a file, without storing the whole file in memory?
I found an answer for lines
I tried something like this but it doesn't help:
const stream = fs.createReadStream("walmart.dump", {
    encoding: 'utf8',
    fd: null,
    bufferSize: 1,
});
stream.on('data', function (sym) {
    console.log(sym);
});
A Readable stream has a read() method to which you can pass the size of each chunk to be read. For example:
var readable = fs.createReadStream("walmart.dump", {
    encoding: 'utf8',
    fd: null,
});
readable.on('readable', function () {
    var chunk;
    while (null !== (chunk = readable.read(1) /* here */)) {
        console.log(chunk); // since the stream has an encoding set, chunk is a one-character string
    }
});
Here's a lower-level way to do it: fs.read(fd, buffer, offset, length, position, callback)
using:
const fs = require('fs');

// open the file for reading; returns a file descriptor
const fd = fs.openSync('your-file.txt', 'r');

function readOneCharFromFile(position, cb) {
    // only need to store one byte (one character)
    const b = Buffer.alloc(1);
    fs.read(fd, b, 0, 1, position, function (err, bytesRead, buffer) {
        console.log('data => ', String(buffer));
        cb(err, buffer);
    });
}
you will have to increment the position as you read the file, but it will work.
here's a quick example of how to read a whole file, character by character
Just for fun I wrote this complete script to do it; just pass in a different file path and it should work:

const async = require('async');
const fs = require('fs');
const path = require('path');

function read(fd, position, cb) {
    let isByteRead = null;
    let ret = Buffer.alloc(0);

    async.whilst(
        function () {
            return isByteRead !== false;
        },
        function (cb) {
            readOneCharFromFile(fd, position++, function (err, bytesRead, buffer) {
                if (err) {
                    return cb(err);
                }
                isByteRead = !!bytesRead;
                if (isByteRead) {
                    ret = Buffer.concat([ret, buffer]);
                }
                cb(null);
            });
        },
        function (err) {
            cb(err, ret);
        }
    );
}

function readOneCharFromFile(fd, position, cb) {
    // only need to store one byte (one character)
    const b = Buffer.alloc(1);
    fs.read(fd, b, 0, 1, position, cb);
}

/// use your own file here
const file = path.resolve(__dirname + '/fixtures/abc.txt');
const fd = fs.openSync(file, 'r');

// start reading at position 0; position will be incremented
read(fd, 0, function (err, data) {
    if (err) {
        console.error(err.stack || err);
    }
    else {
        console.log('data => ', String(data));
    }
    fs.closeSync(fd);
});
As you can see, we increment the position integer every time we read the file; hopefully the OS keeps the file cached as we go. Using async.whilst() is OK, but I think a more functional style would avoid keeping state at the top of the function (ret and isByteRead). I will leave it as an exercise to the reader to implement this without those stateful variables.
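As one possible take on that exercise, here is a sketch using the promise-based fs API (fs.promises, available in modern Node); the accumulation happens in a local loop rather than in variables shared across callbacks:
const fsp = require('fs').promises;

async function readFileOneByteAtATime(path) {
    const fh = await fsp.open(path, 'r');
    try {
        const chunks = [];
        for (let position = 0; ; position++) {
            const b = Buffer.alloc(1);
            // FileHandle#read resolves to { bytesRead, buffer }
            const { bytesRead } = await fh.read(b, 0, 1, position);
            if (bytesRead === 0) break; // end of file
            chunks.push(b);
        }
        return Buffer.concat(chunks);
    } finally {
        await fh.close();
    }
}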

Node.js: Read large text file only partially

I have to read a very large csv file (> 80MB and growing).
I usually only have to parse the last 1% of the file. But getting to that part takes a few minutes.
Is there a way to start reading only at line N?
Or, alternatively, could I read the stream from end to start?
I'm currently using fast-csv to read the file:
// convert csv into postgres copy file
csv.fromPath(filepath, {
    headers: false
}).transform(function (data) {
    // check if record meets condition
    var dt = parseInt(data[0]);
    var date = new Date(dt * 1000);
    var mom = moment(date);
    if (mom.isAfter('2014-01-01 00:00')) {
        // transform data and return object
        return transform(data);
    }
    return null;
}).pipe(csv.createWriteStream({
    headers: true
})).pipe(fs.createWriteStream(outpath, {
    encoding: "utf8"
})).on('finish', function () {
    // do postgres import
});
Using a combination of node's fs.stat, fs.open, and fs.read, you can find the size of the file and read just the last 1% into a buffer:
var fs = require('fs');
var filename = 'csv.csv';

fs.stat(filename, function (err, stat) {
    if (err) throw err;
    var bytesToRead = Math.ceil(0.01 * stat.size); // last 1%
    var startingPosition = stat.size - bytesToRead;
    var readBuffer = Buffer.alloc(bytesToRead);
    fs.open(filename, 'r', function (err, fd) {
        if (err) throw err;
        fs.read(fd, readBuffer, 0, bytesToRead, startingPosition,
            function (err, bytesRead) {
                if (err) throw err;
                console.log(readBuffer.toString());
            });
    });
});
You can't start reading at line N directly, because you would have to read everything before it to know where the newline characters are.
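One practical follow-up: because startingPosition almost certainly lands in the middle of a line, drop the partial first line before parsing. A small sketch continuing from the read callback above:
var text = readBuffer.toString();
var firstNewline = text.indexOf('\n');
// keep only the complete lines after the first newline boundary
var completeLines = firstNewline === -1 ? '' : text.slice(firstNewline + 1);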

Node.js CSV module

I'm using a CSV module as in the example below.
I was wondering if there is a way to make sure that all the fields aren't inserted as strings, e.g. the number 1.23 is not inserted as string "1.23".
It seems to make every field a string.
var csv = require('csv');
var q = async.queue(myCollection.insert.bind(myCollection), 50);

csv()
    .from.path(req.files.myCSV.path, {columns: true})
    .transform(function (data, index, cb) {
        q.push(data, function (err, res) {
            if (err) return cb(err);
            cb(null, res[0]);
        });
    })
    .on('end', function () {
        q.drain = function () {};
    });
In csv.transform(), before q.push(), you can convert fields using e.g. parseInt or parseFloat:
data.fieldX = parseInt(data.fieldX);
data.fieldY = parseFloat(data.fieldY);
You could also remove a field (delete data.fieldY;) or add one (data.fullName = data.first + ' ' + data.last;).
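If you want this for every column rather than a hand-picked few, here is a generic sketch (the field names above are from the answer; nothing here is specific to the csv module):
function coerceNumbers(row) {
    Object.keys(row).forEach(function (key) {
        var value = row[key];
        // convert any field that parses cleanly as a number; leave the rest alone
        if (value !== '' && !isNaN(value)) row[key] = parseFloat(value);
    });
    return row;
}
// then inside .transform(), push the converted row: q.push(coerceNumbers(data), ...)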
