Problem Statement:
Complete the function readFile to read the contents of the file sample.txt and return the content as a plain text response.
Note:
Make sure that when you read the file, you use its full path.
For example, suppose you have to read the file xyz.txt;
then instead of writing './xyz.txt' or 'xyz.txt',
write ${__dirname}/xyz.txt
My Code:
const fs = require('fs');
const path = require('path');
let readFile = () => {
  let file = path.join(__dirname, '/xyz.txt');
  let variableFile = fs.readFileSync(file);
  return variableFile.toString();
};

module.exports = {
  readFile: readFile
};
You have to pass an encoding parameter to readFileSync or it will return a buffer:
const variableFile = fs.readFileSync(file, "utf8");
return variableFile;
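Put back into the original module, the corrected synchronous version would look roughly like this (a sketch; the xyz.txt name is kept from the question, only the encoding argument is new):

const fs = require('fs');
const path = require('path');

let readFile = () => {
  let file = path.join(__dirname, 'xyz.txt');
  // with an encoding, readFileSync returns a string instead of a Buffer
  return fs.readFileSync(file, 'utf8');
};

module.exports = {
  readFile: readFile
};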
PS: You should not use synchronous calls in production; there is a very neat API called "promisify" that allows you to use async/await or promises with fs:
const {promisify} = require('util');
const fs = require('fs');
const readFile = promisify(fs.readFile);
const example = async () => {
  const file = await readFile(/*...*/);
}
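As a rough sketch, the original module rewritten around the promisified readFile could look like this (the xyz.txt name and utf8 encoding are carried over from the question, not stated in the answer above):

const { promisify } = require('util');
const fs = require('fs');
const path = require('path');

const readFileAsync = promisify(fs.readFile);

// resolves with the file contents as a string because of the encoding argument
const readFile = () => readFileAsync(path.join(__dirname, 'xyz.txt'), 'utf8');

module.exports = { readFile };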
I am trying to download files from GitHub with the fetch() function.
Then I try to save the fetched file stream as a file with the fs module.
When doing so, I get this error:
TypeError [ERR_INVALID_ARG_TYPE]: The "transform.writable" property must be an instance of WritableStream. Received an instance of WriteStream
My problem is that I don't know the difference between WriteStream and WritableStream, or how to convert between them.
This is the code I run:
async function downloadFile(link, filename = "download") {
  var response = await fetch(link);
  var body = await response.body;
  var filepath = "./" + filename;
  var download_write_stream = fs.createWriteStream(filepath);
  console.log(download_write_stream.writable);
  await body.pipeTo(download_write_stream);
}
Node.js: v18.7.0
You can use Readable.fromWeb to convert body, which is a ReadableStream from the web streams API, into a NodeJS Readable stream that can be used with the fs methods.
Note that readable.pipe returns another stream instantly. To wait for it to finish, you can use the promise version of stream.finished to convert it into a Promise, or else you could add listeners for the 'finish' and 'error' events to detect success or failure.
const fs = require('fs');
const { Readable } = require('stream');
const { finished } = require('stream/promises');

async function downloadFile(link, filepath = './download') {
  const response = await fetch(link);
  const body = Readable.fromWeb(response.body);
  const download_write_stream = fs.createWriteStream(filepath);
  await finished(body.pipe(download_write_stream));
}
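If you prefer the event-listener approach mentioned above instead of stream/promises, a rough equivalent (my sketch, not part of the original answer) could be:

const fs = require('fs');
const { Readable } = require('stream');

async function downloadFile(link, filepath = './download') {
  const response = await fetch(link);
  const body = Readable.fromWeb(response.body);
  const download_write_stream = fs.createWriteStream(filepath);
  await new Promise((resolve, reject) => {
    body.on('error', reject);            // errors while reading/converting the body
    body.pipe(download_write_stream)
      .on('finish', resolve)             // all data has been flushed to disk
      .on('error', reject);              // errors while writing the file
  });
}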
Good question. Web streams are something new, and they are a different way of handling streams. The WritableStream documentation tells us that we can create WritableStreams as follows:
import {
  WritableStream
} from 'node:stream/web';

const stream = new WritableStream({
  write(chunk) {
    console.log(chunk);
  }
});
Then, you could create a custom stream that writes each chunk to disk. An easy way could be:
const download_write_stream = fs.createWriteStream('./the_path');

const stream = new WritableStream({
  write(chunk) {
    download_write_stream.write(chunk);
  },
});

async function downloadFile(link, filename = 'download') {
  const response = await fetch(link);
  const body = response.body; // response.body is already a ReadableStream, no await needed
  await body.pipeTo(stream);
}
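One caveat with this route: the fs stream is never ended, so pipeTo can resolve before its buffered data has been flushed to disk. A small variation that closes it when the web stream finishes (a sketch of my own, assuming Node 18+ where fetch and WritableStream are global):

const fs = require('fs');

async function downloadFile(link, filename = 'download') {
  const file = fs.createWriteStream('./' + filename);
  const sink = new WritableStream({
    write(chunk) {
      file.write(chunk);
    },
    close() {
      // wait for the fs stream to flush and close before pipeTo resolves
      return new Promise((resolve) => file.end(resolve));
    },
  });
  const response = await fetch(link);
  await response.body.pipeTo(sink);
}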
module.exports = {
  name: "help",
  execute(msg, args){
    const fs = require("fs");
    const commandFiles = fs.readdirSync("./commands/").filter(file => file.endsWith(".js"));
    for (const file of commandFiles){
      const name = file.slice(0, -3);
      const descriptionFileName = name.concat(".desc");
      const descriptionFile = `./commands/${descriptionFileName}`;
      var output = "Help:";
      fs.readFile(descriptionFile, function(err, data){
        const helpLine = name.concat(" - ", data.toString());
        output = output + "\n" + helpLine
      });
      msg.channel.send(output);
    }
  }
}
Expected output:
help - description
ping - description
Output:
Help:
Help:
Any idea why that happens?
I'm new at coding and very new at JS.
You didn't get the expected result because readFile(file, cb) reads a file asynchronously. This means that it just schedules the callback cb to be executed once the I/O operation has completed. However, the following code:
msg.channel.send(output)
will be executed synchronously, so output will still hold its initial value.
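A stripped-down illustration of that ordering (the ping.desc file name is just a hypothetical example):

const fs = require('fs');

let output = 'Help:';

fs.readFile('./commands/ping.desc', (err, data) => {
  // runs later, once the read has completed
  output = output + '\n' + 'ping - ' + data.toString();
});

console.log(output); // prints "Help:" - the callback has not run yet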
One way to handle this could be with promises; here is a partial example based on your code:
module.exports = {
  name: 'help',
  async execute(msg, args) {
    const { readFile, readdir } = require('fs').promises;
    const commandFiles = (await readdir('./commands/')).filter((file) => file.endsWith('.js'));
    const promises = [];
    for (const file of commandFiles) {
      promises.push(
        readFile(`./commands/${file}`)
      );
    }
    const results = await Promise.all(promises);
    // manipulate results as you want
    msg.channel.send(results);
  },
};
Note that because of the async prefix on the exported execute function, you need to handle a promise in the consumer of this module.
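For example, the consumer would need to do something like this (the module path and logging are just illustrative, and msg and args are assumed to be in scope as in the original handler):

const help = require('./commands/help.js');

// execute now returns a promise, so the caller has to handle it
help.execute(msg, args)
  .then(() => console.log('help message sent'))
  .catch(console.error);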
Another approach could be to use a fully parallel control flow pattern
Some references:
promises
async/await
control flow
I am trying to create a function where I can pass a file path and read the file in an async way. What I found out was that it supports streams.
const fs = require('fs');
var parse = require('csv-parse');
var async = require('async');

readCSVData = async (filePath): Promise<any> => {
  let csvString = '';
  var parser = parse({delimiter: ','}, function (err, data) {
    async.eachSeries(data, function (line, callback) {
      csvString = csvString + line.join(',') + '\n';
      console.log(csvString) // I can see this value getting populated
    })
  });
  fs.createReadStream(filePath).pipe(parser);
}
I got this code from here, but I am new to Node.js, so I am not sure how to use await to get the data once all lines are parsed, like this:
const csvData = await this.util.readCSVData(path)
My best workaround for this task is:
const csv = require('csvtojson')
const csvFilePath = 'data.csv'
const array = await csv().fromFile(csvFilePath);
This answer provides legacy code that uses the async library. Promise-based control flow doesn't need this library. Asynchronous processing with async.eachSeries serves no purpose inside the csv-parse callback, because that callback only fires once data has been filled with all of the collected records.
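Since that callback already receives all of the parsed rows, another option is to wrap it in a Promise directly; a minimal sketch, assuming the same csv-parse version as the question (where require('csv-parse') returns the parse function):

const fs = require('fs');
const parse = require('csv-parse');

const readCSVData = (filePath) =>
  new Promise((resolve, reject) => {
    const parser = parse({ delimiter: ',' }, (err, data) => {
      if (err) return reject(err);
      // data already contains every parsed line at this point
      resolve(data.map((line) => line.join(',')).join('\n'));
    });
    fs.createReadStream(filePath).on('error', reject).pipe(parser);
  });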
If reading all data into memory is not an issue, CSV stream can be converted to a promise:
const fs = require('fs');
const getStream = require('get-stream');
const parse = require('csv-parse');

readCSVData = async (filePath): Promise<any> => {
  const parseStream = parse({delimiter: ','});
  const data = await getStream.array(fs.createReadStream(filePath).pipe(parseStream));
  return data.map(line => line.join(',')).join('\n');
}
I want to read from a file into a stream, pipe the output through a function that will uppercase the content, and then write to a file. This is my attempt. What am I doing wrong?
const fs = require('fs')

const fred = q => {
  return q.toUpperCase()
}

fs.createReadStream('input.txt')
  .pipe(fred)
  .pipe(fs.createWriteStream('output.txt'))
Currently the error is:
dest.on is not a function
Based on the answer from Marco, but tidied up:
const fs = require('fs')
const {Transform} = require('stream')

const upperCaseTransform = new Transform({
  transform: (chunk, encoding, done) => {
    const result = chunk.toString().toUpperCase()
    done(null, result)
  }
})

fs.createReadStream('input.txt')
  .pipe(upperCaseTransform)
  .pipe(fs.createWriteStream('output.txt'))
You have to use a Transform stream if you want to "transform" streams. I recommend reading: https://community.risingstack.com/the-definitive-guide-to-object-streams-in-node-js/
const fs = require('fs')
const Transform = require('stream').Transform;

// Create the transform stream:
var uppercase = new Transform({
  decodeStrings: false
});

uppercase._transform = function(chunk, encoding, done) {
  done(null, chunk.toString().toUpperCase());
};

fs.createReadStream('input.txt')
  .pipe(uppercase)
  .pipe(fs.createWriteStream('output.txt'))
EDIT: You need to call .toString() on chunk because it's a Buffer! :)
Using async iteration is the cleaner new way to transform streams:
const fs = require('fs');

(async () => {
  const out = fs.createWriteStream('output.txt');
  for await (const chunk of fs.createReadStream('input.txt', 'utf8')) {
    out.write(chunk.toUpperCase());
  }
})();
As you can see, this way is a lot terser and more readable if you are already working in an async function context.
Everything works when I require a normal JavaScript file in Node:
var test = require('test.js');
console.log(test.foo); // prints the function
But now I have compressed that test.js file. Reading it as a string works:
var fs = require('fs');
var zlib = require('zlib');

var gunzip = zlib.createGunzip();
var buffer = [];

fs.createReadStream('test.js.gz').pipe(gunzip);

gunzip.on('data', function(data){
  buffer.push(data.toString())
});

gunzip.on('finish', function(){
  console.log(buffer.join("")); // prints the JS file as a string
}).on("error", function(e) {
  console.log(e);
});
But I don't want a string, and using eval does not seem right.
How can I evaluate the string as JavaScript, similar to what I get with require('test.js')? (I cannot use external libraries.)
Here's a possible implementation:
const Module = require('module');
const zlib = require('zlib');
const fs = require('fs');

function requireGZ(filename) {
  let code = zlib.gunzipSync(fs.readFileSync(filename)).toString();
  let mod = new Module();
  mod._compile(code, filename);
  return mod.exports;
}
// Use:
let test = requireGZ('./test.js.gz');
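Following the question's example, the result can then be used just like the output of a normal require (assuming test.js assigns a foo export):

console.log(test.foo); // same value you would get from require('./test.js')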