I can read the files using nodejs file system:
const fs = require('fs');

// Asynchronous read: the callback runs only after the read completes,
// which is why the console.log at the bottom prints first.
fs.readFile('./assets/test1.txt', (err, data) => {
    if (err) {
        console.log(err);
        return; // FIX: without this, data is undefined and .toString() throws
    }
    console.log(data.toString());
});
console.log('hello shawn!');
Why does console.log('hello shawn!') run first, and only then console.log(data.toString())?
Are there other file-system methods that read the file's data first and only then run the console.log below?
It is because .readFile is an asynchronous operation. Its last parameter is a callback function, which is invoked after the operation is done. I recommend reading about callbacks and the event loop.
You can use the synchronous version of the function, readFileSync, or use util.promisify to convert the callback-style function into one that returns a promise, and then use async/await — see the example.
Related
This question already has answers here:
How do I return the response from an asynchronous call?
(41 answers)
Closed 12 months ago.
I created these variables and to each of them assigned the text of a txt file. If I store them in the variables, will they still run in the background while executing the file, or would this be synchronous?
// BROKEN (intentionally, as discussed below): fs.readFile returns undefined;
// the callback's `return data` goes nowhere, so getGiraffe is undefined.
const getGiraffe = fs.readFile("./txt/giraffe.txt", "utf-8", (err, data) => {
return data;
});
// BROKEN: fs.readFileSync(path[, options]) accepts no callback — this third
// argument is silently ignored. getRhino does receive the file contents.
const getRhino = fs.readFileSync("./txt/rhino.txt", "utf-8", (err, data) => {
return data;
});
Neither of your code examples is correct.
In the first code example:
// Quoted from the question: the callback's return value is discarded by
// fs.readFile, so getGiraffe ends up undefined.
const getGiraffe = fs.readFile("./txt/giraffe.txt", "utf-8", (err, data) => {
return data;
});
You are reading the file (with no error handling) and then you do return data back to the callback, but that doesn't go anywhere. So, getGiraffe will simply be undefined.
To use the asynchronous version of fs.readFile(), you must use the data value inside the callback or call some function from within that callback and pass it the data. You cannot simply return the data out.
// Consume the result inside the callback: report the error if one
// occurred, otherwise print the file contents.
fs.readFile("./txt/giraffe.txt", "utf-8", (err, data) => {
  if (err) {
    console.log(err);
    return;
  }
  console.log(data);
});
In the second code example:
// Quoted from the question: fs.readFileSync takes no callback, so this
// third argument is ignored; getRhino still receives the file contents.
const getRhino = fs.readFileSync("./txt/rhino.txt", "utf-8", (err, data) => {
return data;
});
You are passing a callback to a function fs.readFileSync() that does not accept a callback. That callback will be ignored.
The calling signature for fs.readFileSync() is this:
fs.readFileSync(path[, options])
It takes only two arguments, neither is a callback.
Your question
If I store them in the variables, will they still run in the background while executing the file, or would this be synchronous?
The first code example will run in the background asynchronously, but getGiraffe will be undefined. It will not have the data from your file in it. That is just not how fs.readFile() works. You must use the data it delivers INSIDE the callback itself.
Your second code example is synchronous and blocking. It will block the event loop, and getRhino will contain the data from the file, but your callback will be ignored and never called because that's not a supported parameter for fs.readFileSync().
For more information on communicating back a result to the caller from an asynchronous function like fs.readFile() see this reference:
How to return the response from an asynchronous call
I'm writing a small nodejs script to copy files from another folder, modify some text and minify the contents.
As per my research, there seems to be a module called async with which functions can be called one by one. But I'm trying to do the same without installing any modules (as part of learning).
I'm new to this promises/async world. But I could not get it to work sequentially as expected.
Can someone please help ?
/**
 * Replaces every occurrence of each key in `textList` with its value,
 * in all .php/.js/.css files under the configured folders.
 *
 * FIX: the original called resolve() synchronously, before any file had
 * been read or written. We now collect one promise per file and resolve
 * only after every read+replace+write has finished. Errors are rejected
 * instead of being logged and swallowed.
 *
 * @param {Object} textList - map of search-pattern (used as a RegExp) to replacement text
 * @returns {Promise<void>} resolves once all files are rewritten
 */
function updateText(textList) {
    var folders = ['includes/lr', 'includes/vc'];
    var acceptedFileTypes = ['php', 'js', 'css'];
    var jobs = [];

    folders.forEach(function (dir) {
        var files = walk(dir); // was an implicit global in the original
        files.forEach(function (file) {
            var fileType = file.split('.').pop(); // also was an implicit global
            if (!acceptedFileTypes.includes(fileType)) {
                return; // skip everything that is not php/js/css
            }
            jobs.push(new Promise(function (resolve, reject) {
                fs.readFile(file, 'utf8', function (err, data) {
                    if (err) {
                        return reject(err);
                    }
                    console.log('Editing - ' + file);
                    // Replace every occurrence of each key with its value.
                    for (var text in textList) {
                        data = data.replace(new RegExp(text, 'g'), textList[text]);
                    }
                    fs.writeFile(file, data, 'utf8', function (err) {
                        if (err) {
                            return reject(err);
                        }
                        console.log('Written file - ' + file);
                        resolve();
                    });
                });
            }));
        });
    });

    return Promise.all(jobs).then(function () {
        console.log('Resolving');
    });
}
/**
 * Minifies includes/vc/js/script-front.js and writes the result next to it.
 *
 * FIX: avoid wrapping the existing promise from minify() in a redundant
 * new Promise (explicit-construction anti-pattern), and propagate write
 * errors — the original logged them and left its promise pending forever.
 *
 * @returns {Promise<void>} resolves once the minified file is written
 */
function minifyJS() {
    return minify('includes/vc/js/script-front.js').then(function (data) {
        console.log('Minifying file - ');
        // new Promise is still needed here, but only to adapt the
        // callback-style fs.writeFile.
        return new Promise(function (resolve, reject) {
            fs.writeFile('includes/vc/js/script-front.min.js', data, 'utf8', function (err) {
                if (err) {
                    return reject(err);
                }
                resolve();
            });
        });
    });
}
// FIX: chain the promises (return minifyJS() from the first .then) instead
// of nesting them, and attach a .catch — the original chain had no
// rejection handler at all.
updateText({
    'qwerty': 'hello',
})
    .then(function () {
        return minifyJS();
    })
    .then(function () {
        console.log('# Done');
    })
    .catch(function (err) {
        console.log(err);
    });
Output - things are all over the place and not in sequence, though promise is resolved only after editing the files.
Here I'm expecting, all edits to finish first, writes second and minify at the last.
Resolving
Editing - includes/lr/css/style.css
Editing - includes/vc/css/style.css
Editing - includes/vc/js/script-front.js
Editing - includes/vc/js/script-front.min.js
Minifying file -
Editing - includes/vc/js/script.js
Editing - includes/lr/js/script.js
Editing - includes/vc/index.php
Editing - includes/lr/index.php
Written file - includes/lr/css/style.css
Written file - includes/vc/css/style.css
Written file - includes/vc/js/script-front.js
Written file - includes/vc/js/script.js
Written file - includes/lr/js/script.js
Written file - includes/vc/js/script-front.min.js
# Done
Written file - includes/vc/index.php
Written file - includes/lr/index.php
there seems to be module called async with which functions can be called one by one. But I'm trying how to do the same without installing any modules (as a part of learning).
async/await does not require any module loading and is available as part of the NodeJS runtime starting with NodeJS 8.0.0
here is a post with some examples of how to use
I see the use of fs.readFile and fs.writeFile, both of which take a callback function.
That callback function will be invoked once fs.readFile/fs.writeFile completes its task.
Calls to fs.readFile and fs.writeFile are placed on the event-loop and are not guaranteed to complete in the order in which they were added.
To demonstrate the blocking of the event-loop, you can make use of the synchronous calls from the fs module
fs.readFileSync
fs.writeFileSync
I would suggest restructuring the code to make use of util.promisify, which will allow fs.readFile and fs.writeFile to be "promisified" and then used with async/await.
const listOfFilesRead = await readFiles
await writeFiles(listOfFilesRead)
await minify
readFiles would be a declared async function that makes use of util.promisify on fs.readFile, returning an array of data to be used in writeFiles
writeFiles would be a declared async function that makes use of util.promisify on fs.writeFile to await on each write, before moving onto the next, taking in an array of data of files to be written
minify is an async function that will minify the JS
That will give the order you are wanting of readAll, writeAll, minify
As it is written, it is attempting to do the following
read a file
write that file
repeat for all files
then minify
Since you are learning, the links provided should give examples of how to write callback, promise-based, and async-await of asynchronous functions.
I'm assuming NO because it returns a void instead of Promise<void>, however, the documentation states the following sentence:
Asynchronously reads the entire contents of a file.
How does this function read a file asynchronously if it's not an async function?
It is asynchronous.
It does not return anything — let alone a promise (and a promise that resolved as void, i.e. with nothing, would be pointless).
It accepts a callback that will be called when the result is available.
See the documentation which says, explicitly:
Asynchronously reads the entire contents of a file.
and has an example of how to use it:
import { readFile } from 'fs';

// The result arrives via the callback once the read completes.
readFile('/etc/passwd', (err, data) => {
if (err) throw err;
console.log(data); // data is a Buffer here, since no encoding was given
});
A version that returns a promise is also available.
Yes, it is an asynchronous function because it takes a callback and calls that callback function when the data is ready.
That's why it is asynchronous.
Example from the doc:
// Example from the docs: the callback receives either an error or the data.
readFile('/etc/passwd', (err, data) => {
if (err) throw err;
console.log(data); // data is a Buffer here, since no encoding was given
});
fs.readFile is asynchronous; it returns its result later by calling the callback that you supply:
fs.readFile(path[, options], callback)
So if you are not interested in the result of the readFile call immediately (on the next line) after this call, you can use it. Otherwise, use readFile from the fs/promises module, which returns a Promise.
I have a function that writes data to a file then uploads that file to cloud storage. The file isn't finished writing before it starts uploading so I am getting a partial file in cloud storage. I found that fs.writeFileSync(path, data[, options]) could help, but I am not exactly sure how it works.
It is my understanding that node runs asynchronously and I have several async processes running prior to this portion of code. I understand what synchronous vs asynchronous means, but I am having a little trouble understanding how it plays in this example. Here are my questions if I replace the below code with fs.writeFileSync(path, data[, options])
What do the docs mean by "Synchronously append data to a file"
a. Will the next lines of code be halted until the fs.writeFileSync(path, data) is finished?
b. Are previous asynchronous processes halted by this line of code?
If other async processes are not affected, how is writeFileSync different from writeFile?
Is there a callback feature in writeFileSync that I am misunderstanding?
Code for reference
outCsv = "x","y","z"   // NOTE(review): comma operator — outCsv is assigned only "x"; presumably a single CSV string was intended. Also an implicit global.
filename = "file.csv"  // NOTE(review): implicit global — should be const
// Write the CSV, then upload it from INSIDE the callback so the upload
// only starts after the write has finished.
fs.writeFile(filename, outCsv, function (err) {
if (err) {
return console.log(err);
}
console.log('The file was saved!');
// bucket is defined elsewhere (cloud-storage client) — upload starts here.
bucket.upload(filename, (err, file) => {
if (err) {
return console.log(err);
}
console.log('The file was uploaded!');
});
});
Will the next lines of code be halted until the fs.writeFileSync(path, data) is finished?
Yes. It is a blocking operation. Note that you're assuming that fs.writeFileSync does finish.
Are previous asynchronous processes halted by this line of code?
Kinda. Since JavaScript is single-threaded, they will also not be running while the file is writing but will queue up at the next tick of the event loop.
If other async processes are not affected how is writeFileSync different the writeFile?
It blocks any code that comes after it. For an easier example consider the following:
setTimeout(() => console.log('3'), 5);
console.log('1'); // fs.writeFileSync — blocking: runs to completion immediately
console.log('2');
// Prints 1 2 3: the blocking calls finish before the timer callback can fire.
vs
setTimeout(() => console.log('3'), 5);
setTimeout(() => console.log('1'), 0); // fs.writeFile — non-blocking: deferred to the event loop
console.log('2');
// Prints 2 1 3: synchronous code runs first, then the queued timers.
The first will print 1 2 3 because the call to console.log blocks what comes after. The second will print 2 1 3 because the setTimeout is non-blocking. The code that prints 3 isn't affected either way: 3 will always come last.
Is there a callback feature in writeFileSync that I am misunderstanding?
Don't know. You didn't post enough code for us to say.
This all begs the question of why to prefer fs.writeFile over the alternative. The answer is this:
The sync version blocks.
While it's taking however long it takes your e.g. webserver isn't handling requests.
I have a method that reads and writes a log file. This method is called on every request by all users, and it writes the request path to the file. My questions are:
Is it safe to read and then write a file asynchronously, considering concurrency?
If yes for the first question, will the code below work correctly under concurrency?
If yes for the first question, how should I do it?
Please, disregard exceptions and performance questions, this is a didactic code.
var logFile = '/tmp/logs/log.js';

app.get("/", function (req) {
    // FIX: the original declared `var log = {...}`, shadowing the log()
    // function below — so `log(log)` threw "log is not a function".
    var entry = { path: req.path, date: new Date().getTime() };
    log(entry);
});
/**
 * Appends one entry to the JSON log file (read-modify-write).
 * NOTE(review): fs.exists is deprecated, and this read-then-write is racy
 * under concurrent requests — see the discussion below.
 *
 * @param {Object} data - the log entry to append
 */
function log(data) {
    fs.exists(logFile, function (exists) {
        if (exists) {
            fs.readFile(logFile, function (err, fileContents) {
                // FIX: the original named this callback parameter `data`,
                // shadowing the log entry — logData.push(data) appended the
                // raw file Buffer instead of the new entry.
                if (err) {
                    throw err;
                }
                var logData = JSON.parse(fileContents.toString());
                logData.push(data);
                writeLog(logData);
            });
        } else {
            writeLog([data]);
        }
    });
}
// Overwrite the log file with the full array, pretty-printed with tabs.
function writeLog(base) {
    var serialized = JSON.stringify(base, null, '\t');
    fs.writeFile(logFile, serialized, function (err) {
        if (err) {
            throw err;
        }
    });
}
I strongly suggest that you don't just "log asynchronously" because you want the log to be ordered based on the order things happened in your app, and there is no guarantee this will happen that way if you don't synchronize it somehow.
You can, for instance, use a promise chain to synchronize it:
var _queue = Promise.resolve();

/**
 * Appends a message to the log file, serialized through a promise chain so
 * entries are written in call order with only one write in flight at a time.
 *
 * @param {string} message - text to append (a timestamp and newline are added)
 */
function log(message){
    _queue = _queue.then(function(){ // chain onto the queue
        return new Promise(function(resolve){
            fs.appendFile("/tmp/logs/log.txt", new Date() + message + "\n", function(err){
                if(err) console.log(err); // don't die on log exceptions
                // FIX: always progress the queue — the original only resolved
                // on success, so one failed write deadlocked every later log()
                // call forever.
                resolve();
            });
        });
    });
}
You can now call log and it will queue messages and write them some time asynchronously for you. It will never take more than a single file descriptor or exhaust the server either.
Consider using a logging solution instead of rolling your own logger btw.
In your example you're already using the asynchronous versions of those functions. If you're concerned about the order of your operations, then you should use the synchronous versions of those functions.
readFileSync
writeFileSync
Also note that JSON.parse() is a synchronous operation. You can give it an "asynchronous" style using the async module, by doing async.asyncify(JSON.parse(data.toString()));.
As noted by #BenjaminGruenbaum, async.asyncify(); doesn't actually make the operation of JSON.parse(); truly asynchronous but it does provide a more "async" style for the control flow of the operations.