Reading file using Node.js "Invalid Encoding" Error

I am creating an application with Node.js and I am trying to read a file called "datalog.txt." I use the "append" function to write to the file:
// Appends buffer data to a given file
function append(filename, buffer) {
    let fd = fs.openSync(filename, 'a+');
    fs.writeSync(fd, str2ab(buffer));
    fs.closeSync(fd);
}

// Converts a string to a buffer
function str2ab(str) {
    var buf = new ArrayBuffer(str.length * 2); // 2 bytes for each char
    var bufView = new Uint16Array(buf);
    for (var i = 0, strLen = str.length; i < strLen; i++) {
        bufView[i] = str.charCodeAt(i);
    }
    return buf;
}

append("datalog.txt", "12345");
This seems to work great. However, now I want to use fs.readFileSync to read from the file. I tried using this:
const data = fs.readFileSync('datalog.txt', 'utf16le');
I changed the encoding parameter to all of the encoding types listed in the Node documentation, but all of them resulted in this error:
TypeError: Argument at index 2 is invalid: Invalid encoding
All I want to be able to do is be able to read the data from "datalog.txt." Any help would be greatly appreciated!
NOTE: Once I can read the data of the file, I want to be able to get a list of all the lines of the file.

The encoding and other options are passed as an object:
const data = fs.readFileSync('datalog.txt', {encoding:'utf16le'});

Okay, after a few hours of troubleshooting and looking at the docs, I figured out a way to do this.
try {
    // get metadata on the file (we need the file size)
    let fileData = fs.statSync("datalog.txt");
    // create a Buffer to hold the file contents
    // (fs.readSync expects a Buffer or TypedArray, not a bare ArrayBuffer)
    let dataBuffer = Buffer.alloc(fileData["size"]);
    // read the contents of the file into the buffer, then close the descriptor
    let fd = fs.openSync("datalog.txt", 'r');
    fs.readSync(fd, dataBuffer, 0, fileData["size"], 0);
    fs.closeSync(fd);
    // view the bytes as 16-bit code units and convert them into a string
    let codeUnits = new Uint16Array(dataBuffer.buffer, dataBuffer.byteOffset, dataBuffer.length / 2);
    let data = String.fromCharCode.apply(null, codeUnits);
    // split the contents into lines
    let dataLines = data.split(/\r?\n/);
    // print out each line
    dataLines.forEach((line) => {
        console.log(line);
    });
} catch (err) {
    console.error(err);
}
Hope it helps someone else with the same problem!
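For reference, since str2ab stores one 16-bit code unit per character (little-endian on typical hardware), the same lines can be recovered with the utf16le option from the answer above plus a split; a minimal sketch:

const fs = require('fs');

// Read the UTF-16LE file as a string and split it into lines.
const data = fs.readFileSync('datalog.txt', { encoding: 'utf16le' });
const dataLines = data.split(/\r?\n/);
console.log(dataLines);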

This works for me:
index.js
const fs = require('fs');
// Write
fs.writeFileSync('./customfile.txt', 'Content_For_Writing');
// Read
const file_content = fs.readFileSync('./customfile.txt', {encoding:'utf8'}).toString();
console.log(file_content);
node index.js
Output:
Content_For_Writing
Process finished with exit code 0

Related

How to delete lines of text from file with createWriteStream with Node.js?

I'm trying to update a huge text document by deleting text that is dynamically received from an array. I cannot use readFileSync because the file is way too large, so I have to stream it. The problem I'm encountering is that the function deletes everything instead of only deleting what's in the array. Perhaps I'm not understanding how to properly delete something from a stream. How can this be done?
largeFile_example.txt
test_domain_1
test_domain_2
test_domain_3
test_domain_4
test_domain_5
test_domain_6
test_domain_7
test_domain_8
test_domain_9
test_domain_10
stream.js
const es = require('event-stream');
const fs = require('fs');

// array of domains to delete
var domains = ['test_domain_2', 'test_domain_6', 'test_domain_8'];

// loop
domains.forEach(function (domain) {
    // domain to delete
    var dom_to_delete = domain;
    // stream
    var s = fs
        .createReadStream('largeFile_example.txt')
        .pipe(es.split())
        .pipe(
            es
                .mapSync(function (line) {
                    // check if found in text
                    if (line === dom_to_delete) {
                        // delete
                        var newValue = dom_to_delete.replace(line, '');
                        fs.createWriteStream('largeFile_example.txt', newValue, 'utf-8');
                    }
                })
                .on('error', function (err) {
                    console.log('Error while reading file.', err);
                })
                .on('end', function () {
                    //...do something
                }),
        );
})
You can simply use the readline interface with streams to read the file line by line. When you encounter a domain from the array, just don't add it to the output.
You can use for await...of with async/await:
const fs = require('fs');
const readline = require('readline');

async function processLine() {
    const fileStream = fs.createReadStream('yourfile');

    const rl = readline.createInterface({
        input: fileStream,
        crlfDelay: Infinity
    });
    // Note: crlfDelay recognizes all instances of CR LF
    // ('\r\n') in the file as a single line break.

    for await (const line of rl) {
        // each line will be here as a domain
        // create a write stream and append it to the file
        // line by line using { flag: 'a' }
    }
}

processLine();
To delete the domains from the existing file, you need to follow these steps:
1. Read the file as a stream.
2. Replace the text you don't want with '' using a regex or the replace method.
3. Add the updated content to a temp file or a new file.
There is no way, as far as I know, to read from one point of a file and update that same line in place in Node.js (I'd be happy to learn of such a technique). That's why you need to create a new file and, once it's updated, remove the old one.
Maybe you can add some more detail on why you want to do that. If your file were not large you could do the whole edit in memory, but your case is different.
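Putting the readline loop and the temp-file steps above together, a minimal sketch (it reuses the file name and domains array from the question; the temp file name is an arbitrary choice):

const fs = require('fs');
const readline = require('readline');

const domains = ['test_domain_2', 'test_domain_6', 'test_domain_8'];

async function removeDomains() {
    const rl = readline.createInterface({
        input: fs.createReadStream('largeFile_example.txt'),
        crlfDelay: Infinity
    });

    // Append every line we want to keep to a temp file.
    for await (const line of rl) {
        if (!domains.includes(line)) {
            fs.appendFileSync('largeFile_example.tmp', line + '\n');
        }
    }

    // Replace the original file with the filtered copy.
    fs.renameSync('largeFile_example.tmp', 'largeFile_example.txt');
}

removeDomains().catch(console.error);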

Can not determine the file type from file content using Node.js

I cannot determine the file type from the file content. My code is below.
const FileType = require('file-type');

let content = 'eyJ0ZXN0LXNjaGVtYTp0ZXN0LXNjaGVtYSI6eyJuYW1lIjoiYlpuYUpmd0VXdGwiLCJkZXZpY2UiOiJ0ZXN0ZGV2aWNlMCIsImludGVyZmFjZS1pZCI6IlRlbkdpZ0UzLzQvMy82IiwiaW50ZXJmYWNlX2Rlc2NyaXB0aW9uIjoiMkNQMGg5QllmSW1sQkdpWG9DdXRzVzBtTkN3a3U2OFdKOGRncXhCeDhBOU1aRzVsSHl2ZXVCbXNQRmlPQWlSajdOR20xUiIsImludGVyZmFjZV9kZXNjIjoieFlRWHVDOHM2ZCIsInJlZHVuZGFuY3kiOiJpbmRlcGVuZGVudCIsInZsYW4iOjEsImN2bGFuIjo4NTcsImFkZHIiOiIyNTUuMDE3LjIxMC4yNTAiLCJwb2xpY2llcyI6W3sibmFtZSI6InR4NHBoR29yRDROZSIsImJhbmR3aWR0aCI6MjY2MDU2OTU4NiwiYmFuZHdpZHRoLXVuaXQiOiJicHMifV0sInNlY29uZENvbnQiOnsic2Vjb25kT3V0ZXJMaXN0IjpbXX19fQ==';
let stepsRawData = Buffer.from(content, 'base64');
await isFileType(stepsRawData);

isFileType = async (buffer) => {
    try {
        console.log('filetype', await FileType.fromBuffer(buffer));
    } catch (err) {
        return false;
    }
}
Here I am using the file-type module to check what the file type is. I have some base64-encoded file content and I need to check what type of content it is (i.e. json/xml/txt). But as per my code, the console message shows filetype undefined. I need to determine the file type from the encoded file content. Can anybody help me fix this problem?
isFileType = async (buffer) => {
    try {
        // Note: file-type detects binary formats by their magic bytes;
        // plain-text content (txt/json/xml) has none, so it resolves to undefined.
        console.log('filetype', await FileType.fromBuffer(Buffer.from(buffer, 'base64')));
    } catch (err) {
        return false;
    }
}

Getting error while reading json file using node.js

I am getting the following error while reading the json file using Node.js. I am explaining my code below.
SyntaxError: Unexpected token # in JSON at position 0
at JSON.parse (<anonymous>)
My json file is given below.
test.json:
#PATH:/test/
#DEVICES:div1
#TYPE:p1
{
    name:'Raj',
    address: {
        city:'bbsr'
    }
}
This JSON file has some lines that start with #. I need to remove those # lines from the file. My code is below.
fs.readdirSync(`${process.env['root_dir']}/uploads/${fileNameSplit[0]}`).forEach(f => {
    console.log('files', f);
    let rawdata = fs.readFileSync(`${process.env['root_dir']}/uploads/${fileNameSplit[0]}/${f}`);
    let parseData = JSON.parse(rawdata);
    console.log(parseData);
});
Here I am trying to read the file first, but I am getting the above error. I need to remove those # lines from the JSON file, read all the remaining data, and convert the removed lines to an object like const obj = {PATH:'/test/', DEVICES:'div1', TYPE:'p1'}. I am using the Node.js fs module to achieve this.
As you said, you need to remove those # lines from the JSON file. You need to code this yourself. To help with that, read the file into a string and not a Buffer by providing a charset to readFileSync.
const text = fs.readFileSync(path, 'utf8');
console.log(text);
const arr = text.split("\n");
const noComments = arr.filter(x => x[0] !== "#");
const filtered = noComments.join("\n");
const data = JSON.parse(filtered);
console.log(data);
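If you also need the removed # lines as the object described in the question (const obj = {PATH:'/test/', DEVICES:'div1', TYPE:'p1'}), a minimal sketch along the same lines, reusing the arr variable from above:

// Build an object from the lines that start with '#'.
const obj = {};
for (const line of arr.filter(x => x[0] === "#")) {
    // '#PATH:/test/' -> key 'PATH', value '/test/'
    const [key, ...rest] = line.slice(1).split(":");
    obj[key] = rest.join(":");
}
console.log(obj); // { PATH: '/test/', DEVICES: 'div1', TYPE: 'p1' }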

Problem Utf-8 decoding nodejs - Invalid continuation byte at readContinuationByte

I'm developing a Node.js server and I want to convert a CSV file to a JSON file. I've succeeded with that part, but one of my fields (libelle_etape) is not in a good format (UTF-8); the values are like 'EII/MEA 5Ã¨me annÃ©e' or 'Geau/STE 4Ã¨me annÃ©e' etc. So in my function I would like to decode those values to have the right format in my JSON file.
I get the result that I want when I do:
const result = utf8.decode(str)
BUT the problem is: when I want to replace the old string that I have in my CSV file (this one: "EII/MEA 5Ã¨me annÃ©e") with the good one (result = "EII/MEA 5ème année"), I get the following error:
Unhandled rejection Error: Invalid continuation byte
    at readContinuationByte
The entire code is:
// assumed imports for this snippet
const CSVToJSON = require('csvtojson');
const utf8 = require('utf8');
const FileSystem = require('fs');

CSVToJSON()
    .fromFile('./infoEtu.csv')
    .then((source) => {
        const oneData = source[0];
        for (let i = 0; i < source.length; i++) {
            for (let j = 0; j < Object.keys(source[i]).length; j++) {
                const columnName = Object.keys(source[i]);
                columnName.forEach((element) => {
                    if (element == 'Libelle_etape') {
                        const str = source[i]['Libelle_etape'];
                        const result = utf8.decode(str);
                        console.log(result); // this line shows me the good result
                        source[i]['Libelle_etape'] = String(result); // this line is definitely the problem; I've tried with and without the String() method but it's the same error
                    }
                });
            }
        }
        const data = JSON.stringify(source);
        FileSystem.writeFileSync('./jsonEtu.json', data);
    });
Thank you in advance for your help. I've been searching for a long time now and I can't find the same problem anywhere.
UPDATE:
The problem was caused by the accents!
It works by doing this:
const result = accents.remove(utf8.decode(str));
source[i]['Libelle_etape'] = result;

Write a line into a .txt file with Node.js

I want to use Node.js to create a simple logging system which writes each new line after the previous line of a .txt file. However, I don't know how the file system functionality of Node.js works.
Can someone explain it?
Inserting data into the middle of a text file is not a simple task. If possible, you should append it to the end of your file.
The easiest way to append data to a text file is to use the built-in fs.appendFile(filename, data[, options], callback) function from the fs module:
var fs = require('fs')
fs.appendFile('log.txt', 'new data', function (err) {
    if (err) {
        // append failed
    } else {
        // done
    }
})
But if you want to write data to the log file several times, then it's best to use the fs.createWriteStream(path[, options]) function instead:
var fs = require('fs')
var logger = fs.createWriteStream('log.txt', {
    flags: 'a' // 'a' means appending (old data will be preserved)
})

logger.write('some data') // append string to your file
logger.write('more data') // again
logger.write('and more') // again
Node will keep appending new data to your file every time you call .write, until your application is closed or until you manually close the stream by calling .end:
logger.end() // close stream
Note that logger.write in the above example does not write to a new line. To write data to a new line:
var writeLine = (line) => logger.write(`\n${line}`);
writeLine('Data written to a new line');
Simply use the fs module and something like this:
fs.appendFile('server.log', 'string to append', function (err) {
    if (err) return console.log(err);
    console.log('Appended!');
});
Step 1: If you have a small file, read all the file data into memory.
Step 2: Split the file data string into an array of lines.
Step 3: Search the array to find the location where you want to insert the text.
Step 4: Once you have the location, insert your text:
yourArray.splice(index, 0, "new added text");
Step 5: Convert your array back into a string, using the same separator you split on:
yourArray.join("\n");
Step 6: Write your file back out. Note that fs.createWriteStream expects a file path, not the data, so write it like so:
fs.writeFileSync(yourFilename, yourArray.join("\n"));
This is not advised if your file is too big.
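Putting these steps together, a minimal sketch (the file name, marker text, and inserted line are placeholders):

const fs = require('fs');

// Step 1-2: read the whole file and split it into lines
const lines = fs.readFileSync('log.txt', 'utf8').split('\n');

// Step 3-4: find the insertion point and splice in the new text
// (findIndex returns -1 if the marker is missing; a real implementation should check)
const index = lines.findIndex(line => line.includes('marker'));
lines.splice(index, 0, 'new added text');

// Step 5-6: join the lines and write the file back
fs.writeFileSync('log.txt', lines.join('\n'));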
I created a log file which prints data into a text file using the "Winston" logger. The source code is below.
const { createLogger, format, transports } = require('winston');
var fs = require('fs')
var logger = fs.createWriteStream('Data Log.txt', {
    flags: 'a'
})
const os = require('os');
var sleep = require('system-sleep');
var endOfLine = require('os').EOL;

var t = ' ';
var s = ' ';
var q = ' ';

var array1 = [];
var array2 = [];
var array3 = [];
var array4 = [];

array1[0] = 78;
array1[1] = 56;
array1[2] = 24;
array1[3] = 34;

// convert the numbers to strings
for (var n = 0; n < 4; n++) {
    array2[n] = array1[n].toString();
}

// buffers used as output columns
for (var k = 0; k < 4; k++) {
    array3[k] = Buffer.from(' ');
}

for (var a = 0; a < 4; a++) {
    array4[a] = Buffer.from(array2[a]);
}

// copy each value into its column buffer
for (var m = 0; m < 4; m++) {
    array4[m].copy(array3[m], 0);
}

// header row
logger.write('Date' + q);
logger.write('Time' + (q + ' '))
logger.write('Data 01' + t);
logger.write('Data 02' + t);
logger.write('Data 03' + t);
logger.write('Data 04' + t)
logger.write(endOfLine);
logger.write(endOfLine);

// user-defined function: writes one data row
function mydata() {
    logger.write(datechar + s);
    logger.write(timechar + s);
    for (n = 0; n < 4; n++) {
        logger.write(array3[n]);
    }
    logger.write(endOfLine);
}

var now = new Date();
var dateFormat = require('dateformat');
var date = dateFormat(now, "isoDate");
var time = dateFormat(now, "h:MM:ss TT ");
var datechar = date.toString();
var timechar = time.toString();

mydata();
sleep(5 * 1000);
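For reference, the winston imports in the snippet above are never used; the actual writing goes through fs.createWriteStream. A minimal sketch of logging to a file with winston itself (assuming winston 3.x) would look like:

const { createLogger, format, transports } = require('winston');

const logger = createLogger({
    format: format.combine(format.timestamp(), format.simple()),
    transports: [new transports.File({ filename: 'Data Log.txt' })]
});

logger.info('Data 01: 78'); // appends a timestamped line to Data Log.txt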
