NodeJS File System - node.js

I modified a property of an object stored in an internal JSON file via the Node.js File System module, triggered by a dropdown on the UI. When I then read the file again for further modification, the earlier modified value is not there. When I open the JSON file I can see the modified value, but when I read the same file via the fs.readFile method and debug it, I see the original value instead of the modified one.
Any suggestions on how to tackle this?
I am calling the method below to read the file first and then write to it; it is called from a separate module.
fileOperation: async function (operation, fileName, dataToWrite) {
    switch (operation) {
        case 'read':
            return new Promise((resolve, reject) => {
                fs.readFile(path.join(__dirname, '../../location/' + fileName), function (err, data) {
                    if (err) {
                        return reject(err);
                    } else {
                        const parsedData = JSON.parse(data);
                        return resolve(parsedData);
                    }
                });
            });
        case 'write':
            return new Promise((resolve, reject) => {
                fs.writeFile(path.join(__dirname, '../../location/' + fileName), JSON.stringify(dataToWrite), {
                    'Content-Type': 'application/json'
                }, function (error) {
                    if (error) {
                        return reject(error);
                    } else {
                        return resolve('File written successfully');
                    }
                });
            });
    }
}
And in the index.js file, this:
fs.readFile(path.join(__dirname, './location/fileName'), function (err, data) {
    if (err) {
        res.status(500).send(err);
    } else {
        fileData = JSON.parse(data).requiredKey;
    }
});
The fileData above is not the one that I saved earlier.
Calling the methods from this switch case:
case 'dropdownSelection':
    this.fileOperation('read', 'file.json', {}).then(fileData => {
        fileData.key = updatedData;
        this.fileOperation('write', 'file.json', fileData).then(success => {
            return resolve(success);
        }).catch(error => {
            return reject(error);
        });
    }).catch(error => {
        return reject(error);
    });
    break;

There are two likely scenarios I can think of given what you've shared in the question:
1. Timing Issue
It's possible that, at the time you read the file again for further modification, the previous modification has yet to complete. File operations take time and are asynchronous, so if you don't handle them correctly they can happen in an order you don't expect.
The second code snippet you shared, in index.js, contains a file read, but it's not clear when that code runs. When it runs, it obtains a copy of the file as it was at that moment; later updates made by your dropdown selection will not automatically propagate back to index.js unless you initiate another read after the write has completed.
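For example, a minimal sketch of the dropdown handler (assuming the enclosing function is async; updatedData, resolve and reject are taken from your snippet) that only re-reads after the write has resolved:
case 'dropdownSelection':
    try {
        // read, modify, then wait for the write to finish
        const fileData = await this.fileOperation('read', 'file.json', {});
        fileData.key = updatedData;
        await this.fileOperation('write', 'file.json', fileData);
        // only now will a fresh read see the new value
        const latest = await this.fileOperation('read', 'file.json', {});
        resolve(latest);
    } catch (error) {
        reject(error);
    }
    break;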
2. Filepath Issue
The filepaths in your code snippets are different. In one case you have path.join(__dirname, '../../location/' + fileName) and in another case you have path.join(__dirname, './location/fileName').
It's possible that your file is being written to a location you aren't expecting. Paths built with path.join(__dirname, ...) are resolved relative to the directory of the module that contains the code, and your two snippets live in different modules, so they can easily point at different files. This means you might inadvertently be reading a different file than the one you're writing! Try navigating to the folder containing the module that defines fileOperation, going up two folders, then into a location folder if one exists, and see if the file you're working with exists there. If it does, you'll know there's a filepath issue.
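A quick way to check is to log the fully resolved path from each module and compare them (file.json is just the example name from your handler):
const path = require('path');

// inside the module that defines fileOperation:
console.log('fileOperation target:', path.join(__dirname, '../../location/', 'file.json'));

// inside index.js:
console.log('index.js target:', path.join(__dirname, './location/', 'file.json'));
// if the two logged paths differ, the read and write are hitting different files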

Related

How do I modify JSON files while keeping the state updated?

If I have a program as follows to modify a JSON file:
var fs = require('fs');
var dt = require('./dataWrite.json');
console.log("before", dt);
fs.readFile('./data.json', 'utf8', (err, data) => {
    if (err) {
        throw err;
    } else {
        fs.writeFileSync('./dataWrite.json', data);
    }
});
console.log("after", dt);
The console for the before and after gives me the same results. The data in the file is modified as expected though. Is there a way to always have the latest state of the file in your program?
Side question: the following code doesn't modify the files at all, and I wasn't able to figure out why.
var fs = require('fs');
var dt = fs.readFileSync('./dataTest.json', 'utf8', function (err, data) {
    if (err) {
        throw err;
    }
});
console.log('before', dt);
fs.readFileSync('./data.json', 'utf8', (err, data) => {
    if (err) {
        throw err;
    }
    fs.writeFileSync('./dataTest.json', data);
    console.log('data', data);
});
console.log("after", dt);
It's important here to distinguish between synchronous and asynchronous logic.
Since you are using require to read in the JSON file, its contents are read synchronously into dt exactly once, at the beginning of the program, and cached.
When you use the fs.readFile API, you'll notice that it is an asynchronous API and that it requires you to provide a callback to handle the file's data. This means that any work inside the callback happens later, after the surrounding synchronous code has finished.
As such, your before and after logs print the same contents.
If you console.log(dt) after executing fs.writeFileSync, you will still see the old value, since dt holds the value read at the beginning of the program rather than the latest one. But if you reassign the variable by rereading the file after the write, you will see the latest contents.
e.g.
...
fs.writeFileSync('./dataWrite.json', data);
dt = fs.readFileSync('./dataWrite.json', 'utf8');
console.log(dt);
...
See fs.readFileSync.
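Put together, a minimal sketch of the whole flow (same file names as in the question, assuming dataWrite.json contains valid JSON) that keeps dt in sync with what is on disk:
var fs = require('fs');

// read the current contents synchronously instead of require(), so nothing is cached
var dt = JSON.parse(fs.readFileSync('./dataWrite.json', 'utf8'));
console.log('before', dt);

fs.readFile('./data.json', 'utf8', (err, data) => {
    if (err) throw err;
    fs.writeFileSync('./dataWrite.json', data);
    // re-read after the write so the variable reflects the file on disk
    dt = JSON.parse(fs.readFileSync('./dataWrite.json', 'utf8'));
    console.log('after', dt);
});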

fs.writeFile (node) adds information to an already existing file instead of replacing it

I wrote code that writes a file using fs, and node-cron runs it every x minutes. I get the correct data the first time, but on the next job the file contains the new data appended to the old data. I want to create a new file that replaces the old one (and the previous information), but the data keeps getting appended instead of the file containing only the new data:
fs.writeFile(path.join(__dirname, '_data', 'data.json'), JSON.stringify(data, this, 2), { flag: 'w' }, err => {
    if (err) {
        console.error(err);
    } else {
        console.log("Success");
    }
});
I was able to figure out my error: I needed to re-initialize the data variable in order to clear the previous results.
// Save data into fs
fs.writeFile(path.join(__dirname, '_data', 'varlix.json'), JSON.stringify(varlix, this, 2), err => {
    if (err) {
        console.error(err);
    } else {
        console.log("Success");
    }
});
data = [];
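For context, a rough sketch of that pattern in a cron job (the node-cron schedule and the data-filling step are assumptions, not code from the question): fs.writeFile itself replaces the file, so the duplicates come from the accumulator array surviving between runs, and it has to be emptied on every invocation.
const cron = require('node-cron');
const fs = require('fs');
const path = require('path');

let data = []; // accumulator shared across cron runs

cron.schedule('*/5 * * * *', () => {
    data = []; // reset first, so only this run's results get written
    // ... fill data with this run's results ...
    fs.writeFile(path.join(__dirname, '_data', 'data.json'), JSON.stringify(data, null, 2), err => {
        if (err) console.error(err);
        else console.log('Success');
    });
});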

Node.js image convert promise, still get broken image

I am using imagemagick to convert a file on Node.
router.post('/route', upload.array('file'), async (req, res, next) => {
    const files = req.files;
    Promise.all(files.forEach(async (file) => {
        await fileconvert(file);
    }));
    // ...some db create
    return res.send(createdId);
});
// from here, a different file
import im from 'imagemagick';

function fileconvert(file) {
    return new Promise((resolve, reject) => {
        if (file.mimetype === "specialcase") {
            im.convert(
                [file.path, '-format', 'png', 'newFile.png'],
                (err, stdout) => {
                    if (err) reject(err);
                    resolve(stdout);
                }
            );
        }
        // if not special-case; without this, it doesn't work.
        resolve();
    });
}
On the front side I'm using an axios API call.
I am hoping that res.send(createdId) runs only after fileconvert has finished.
Everything works fine, except that when a large file is converted I get a broken image.
The file exists in the expected directory, but it hasn't been fully converted yet.
It seems that as soon as the file is created, it passes. How can I get the fully converted image?
The process on the front side is "Upload file => if createdId exists => route to page".
Thanks for your help.
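One possible cause, sketched below as a guess rather than a confirmed fix: files.forEach returns undefined, so Promise.all receives nothing to wait on and res.send can run before the conversions finish; mapping the files to an array of promises makes the await meaningful (fileconvert and createdId are the names from the question):
router.post('/route', upload.array('file'), async (req, res, next) => {
    const files = req.files;
    // map (not forEach) so Promise.all gets an array of promises to wait for
    await Promise.all(files.map(file => fileconvert(file)));
    // ...some db create
    return res.send(createdId);
});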

How to delete files in tmpfs or /tmp directory for Google Cloud Functions?

I have a google cloud function I've written in Node.js which I have set to execute every 4 hours using the cloud scheduler. Basically the function populates a table I have in BigQuery. My function doesn't explicitly write anything to the tmp directory, it just stores things in local variables for streaming insertion into BigQuery like so:
var rows = [];
// code which fills the rows array
await bigqueryClient
    .dataset(datasetId)
    .table(tableId)
    .insert(rows);
I've noticed (looking at the graph of memory usage) that the memory used with each invocation goes up until it reaches the limit set for the function (I have 512MB for this one), and then I get an error logged - "Error: memory limit exceeded. Function invocation was interrupted."
I've tried the below code to try to remove all files in the tmp directory, but it seems like there are no files to delete:
const directory = '/tmp';
fs.readdir(directory, (err, files) => {
    if (err) throw err;
    console.log(files);
    for (const file of files) {
        fs.unlink(path.join(directory, file), err => {
            if (err) throw err;
        });
    }
});
What gets logged is just an empty array '[ ]'.
What am I missing here? Is the problem something else?
Your code will delete all files the next time it runs, but if you created a file with something like const ws = fs.createWriteStream(`/tmp/${fileName}`);, the file will not be deleted until the next run.
If you want to delete all files as part of the current process, you can separate the part that creates the file(s) from the file cleanup and wrap both in a Promise.
This will delete all files in /tmp.
const directory = '/tmp';
const fileName = `test_file_1.gz`;

function doSomething() {
    return new Promise((resolve) => {
        const ws = fs.createWriteStream(`/tmp/${fileName}`);
        resolve();
    });
}

function fileCleanup() {
    return new Promise((resolve) => {
        fs.readdir(directory, (err, files) => {
            if (err) throw err;
            console.log(files);
            for (const file of files) {
                fs.unlink(path.join(directory, file), err => {
                    if (err) throw err;
                    console.log(`${file} was deleted`);
                });
            }
            resolve();
        });
    });
}
doSomething().then(() => { fileCleanup(); }).then(() => { res.status(200).send('OK'); }).catch((err) => { throw new Error(err); });
Remove res.status(200).send('OK'); if it is not an HTTP trigger. Also, for local testing, I create a directory called tmp. The Cloud Function uses /tmp but locally it is ./tmp.
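As a variation, if the runtime has fs.promises available (Node 10+), the cleanup can await each deletion so it doesn't resolve before the unlinks finish; a minimal sketch under that assumption:
const fsp = require('fs').promises;
const path = require('path');

async function fileCleanup(directory = '/tmp') {
    const files = await fsp.readdir(directory);
    for (const file of files) {
        await fsp.unlink(path.join(directory, file));
        console.log(`${file} was deleted`);
    }
}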

Node.js file uploading using Formidable - events are not getting fired

I am trying to make a file upload using Node.js and the Formidable module.
npm install formidable
And then I made this; please read the comments, where I explain what each function does and describe the algorithm:
// get access to the files that were sent;
// at this time I don't want the files to be uploaded yet;
// in the next function I will validate those files.
function form_parse() {
    form.parse(req, (err, fields, files) => {
        if (err) return req.Cast.error(err);
        if (Object.keys(files).length == 0) return req.Cast.badRequest();
        req.files = files;
        return validate_files();
    });
}

// I made an object with options to validate the files against.
// It works and continues to the process_files() function
// only if the files are verified.
function validate_files() {
    let limitations = require('../uploads-limitations');
    try {
        limitation = limitations[req.params.resource];
    } catch (err) {
        return req.Cast.error(err);
    }
    let validateFiles = require('../services/validate-files');
    validateFiles(req, limitation, err => {
        if (err) return req.Cast.badRequest(err);
        return process_files();
    });
}

// here is the problem - form.on doesn't get fired.
// This is the point at which I want to save those files - after
// they are fully verified.
function process_files() {
    form.on('file', function (name, file) {
        console.log(`file name: ${file.name}`);
        file.path = path.join(__dirname, '../tmp_uploads/' + file.name);
    });
    form.on('error', err => {
        return req.Cast.error(err);
    });
    form.on('end', () => {
        console.log(`successfully saved`);
        return req.Cast.ok();
    });
}

form_parse();
As you can see, and as I have described, the validation works, but when I want to actually save those files the form.on events are not fired.
Yes, because you attach the event listeners at the end of your process, after parsing and validating. This should be done first, before the parsing starts, because these events (file, error, end) fire during the parsing, not after.
form.on('file',...) // First, attach your listeners
.on('error', ...)
.on('end', ...);
form.parse(req) // then start the parsing
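A minimal self-contained sketch of that order of operations, assuming Formidable v1's IncomingForm and the file.name property used in the question (the plain Express response calls stand in for the req.Cast helpers):
const formidable = require('formidable');

function handleUpload(req, res) {
    const form = new formidable.IncomingForm();

    // attach listeners first, so they are registered before any events fire
    form
        .on('file', (name, file) => {
            console.log(`received file: ${file.name}`);
        })
        .on('error', err => {
            res.status(500).send(err);
        })
        .on('end', () => {
            res.send('successfully saved');
        });

    // then start the parsing
    form.parse(req);
}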

Resources