I need to read a file line by line and write new lines back to the same file while reading, if each line satisfies a certain set of conditions. What would be the best way to do this?
function processFile(file, callback) {
    // note the argument order: the encoding comes before the callback
    fs.readFile(file, 'utf8', (err, data) => {
        if (err) return callback(err);
        var lines = data.split('\n');
        // opening with 'w' truncates the file before writing
        fs.open(file, 'w', (err, fd) => {
            if (err) return callback(err);
            lines.forEach(line => {
                if (line === 'meet your condition') {
                    // do your write using fs.write(fd, ...)
                }
            });
            callback();
        });
    });
}
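A note on the snippet above: opening the file with the 'w' flag truncates it, so any lines that don't meet the condition are lost unless you rewrite them. A minimal sketch of a variant that rebuilds the whole file in memory and writes it back (the condition and the inserted line are placeholders):

const fs = require('fs');

function rewriteFile(file, callback) {
    fs.readFile(file, 'utf8', (err, data) => {
        if (err) return callback(err);
        // build the new contents in memory
        const out = [];
        data.split('\n').forEach(line => {
            out.push(line);
            if (line === 'meet your condition') { // placeholder condition
                out.push('your new line');        // placeholder inserted line
            }
        });
        // write everything back in one call instead of fs.open/fs.write
        fs.writeFile(file, out.join('\n'), callback);
    });
}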
Use the Node fs module. With fs you can perform operations asynchronously as well as synchronously. Below is an example of the asynchronous approach:
function readWriteData(savPath, srcPath) {
    fs.readFile(srcPath, 'utf8', function (err, data) {
        if (err) throw err;
        // Do your processing: MD5, send a satellite to the moon, add conditions, etc.
        fs.writeFile(savPath, data, function (err) {
            if (err) throw err;
            console.log('complete');
        });
    });
}
And here is an example that chains the read and the write through callbacks; note this still uses the asynchronous fs.readFile (a truly synchronous variant is sketched after it):
function readFileContent(srcPath, callback) {
    fs.readFile(srcPath, 'utf8', function (err, data) {
        if (err) throw err;
        callback(data);
    });
}

function writeFileContent(savPath, srcPath) {
    readFileContent(srcPath, function (data) {
        fs.writeFile(savPath, data, function (err) {
            if (err) throw err;
            console.log('complete');
        });
    });
}
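For a genuinely synchronous version, fs also offers blocking counterparts of both calls; a minimal sketch using fs.readFileSync and fs.writeFileSync (the function name mirrors the asynchronous example above):

const fs = require('fs');

function readWriteDataSync(savPath, srcPath) {
    // blocks the event loop until both operations finish
    var data = fs.readFileSync(srcPath, 'utf8');
    fs.writeFileSync(savPath, data);
    console.log('complete');
}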
I'm trying to work with CosmosDB in Node.js. I have created some documents in a collection and have added a couple of attachments to one of the documents, like so:
let dbds = new azure(dbEndpoint, {"masterKey": dbKey})
fs.open("c:/temp/capture.png", "r", (err, fd) => {
    if (err) console.log(err);
    else {
        dbds.createAttachmentAndUploadMedia(documentLink, fs, {contentType: "image/png", partitionKey: partitionKey}, (err, result) => {
            fs.close(fd);
            if (err) console.log(err);
            else console.log(result);
        })
    }
})
Now when I read the attachments for that document, I get 2 attachments:
dbds.readAttachments(documentLink, {"partitionKey": partitionKey}).toArray((err, result) => {
    if (err) console.log(err);
    else {
        console.log(result.length); // 2
        result.forEach(i => console.log(i.id)); // id of each attachment
    }
})
Now I need to be able to read the attachment back and store it locally. There doesn't seem to be any documentation I can find for doing this in Node.js. I've tried the following:
let attachment = result[0]; // from the result of toArray() above -- first attachment in the list
let mediaURI = `${attachment._self}${attachment.media}`
dbds.readMedia(mediaURI, (err, result) => {
    if (err) console.log(JSON.parse(err.body).message); // Request url is invalid
    else {
        // try to write result to file
    }
})
How do I create a valid URI to the media in order to download it?
Edit
Based on comments below, I updated my code as follows:
let attachment = result[0]
let mediaURI = attachment.media // here is the change
dbds.readMedia(mediaURI, (err, result) => {
    if (err) console.log(JSON.parse(err.body).message);
    else {
        // try to store the data
    }
})
I no longer get an error but instead get a JSON object:
"{"constants":{"O_RDONLY":0,"O_WRONLY":1,"O_RDWR":2,"S_IFMT":61440,"S_IFREG":32768,"S_IFDIR":16384,"S_IFCHR":8192,"S_IFLNK":40960,"O_CREAT":256,"O_EXCL":1024,"O_TRUNC":512,"O_APPEND":8,"F_OK":0,"R_OK":4,"W_OK":2,"X_OK":1},"F_OK":0,"R_OK":4,"W_OK":2,"X_OK":1}"
I got help from some folks at Microsoft on this.
The primary issue was that I wasn't writing the file correctly.
Instead of this
let dbds = new azure(dbEndpoint, {"masterKey": dbKey})
fs.open("c:/temp/capture.png", "r", (err, fd) => {
    if (err) console.log(err);
    else {
        dbds.createAttachmentAndUploadMedia(documentLink, fs, {contentType: "image/png", partitionKey: partitionKey}, (err, result) => {
            fs.close(fd);
            if (err) console.log(err);
            else console.log(result);
        })
    }
})
With fs.open, fs is not a filestream -- which is what createAttachmentAndUploadMedia is looking for. I had mistakenly assumed that fs.open created a filestream.
What I wound up doing was the following:
fs.readFile("c:/temp/myimage.jpg",(err, data) => {
if(err) reject(err);
else dbds.svc.createAttachmentAndUploadMedia(docPath,data,{contentType: "image/jpeg"
, partitionKey: "Key"}, (err, result) =>{..}
})
In your example, the mediaURI should just be ${attachment.media}, not concatenated with ${attachment._self}.
Here is a snippet:
return new Promise<any>((resolve, reject) => {
    this.documentDbClient.readAttachment(docLink, options, (err, result) => {
        if (err)
            return reject(err);
        resolve(result.media);
    });
}).then((medialink) => {
    return new Promise<any>((resolve, reject) => {
        this.documentDbClient.readMedia(medialink, (err, result) => {
            if (err)
                return reject(err);
            // buffered data
            const data = [];
            result.on('data', (chunk) => data.push(chunk));
            result.on('end', () => {
                resolve(data.join(''));
            });
            // if not buffered
            // resolve(result.toString());
        });
    });
});
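To store the downloaded media locally (the original goal), the resolved data can then be handed to fs.writeFile; a minimal sketch, assuming the chain above is wrapped in a hypothetical downloadAttachment function and using an assumed output path:

const fs = require('fs');

downloadAttachment(docLink, options) // hypothetical wrapper around the promise chain above
    .then((contents) => {
        fs.writeFile("c:/temp/capture-downloaded.png", contents, (err) => { // assumed output path
            if (err) console.log(err);
            else console.log('attachment saved locally');
        });
    })
    .catch((err) => console.log(err));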
I am new to Node.js and trying to cat (concatenate) multiple CSS files on the fly while coding. The chokidar package allows me to call a function when a file is modified, but I have a problem with the execution order.
var goconcat =
    fs.readdir(paths, function (err, files) {
        if (err) { console.log(err); }
        fs.unlink(paths + 'concat.css', function (err) {
            if (err) throw err;
            var list = files.map(function (files) {
                return path.join(paths, files);
            });
            concat(list, paths + 'concat.css', function (err) {
                if (err) throw err;
            });
        });
    });
I want to first delete the previous file, then read the directory and then write a new concat.css. However, I get an error:
Error: ENOENT: no such file or directory, open 'public/css/concat.css'
at error (native)
It appears that the function concat() is executed before the directory update and not after, and is therefore trying to cat a file that has just been deleted. Why?
I know that Node.js executes these functions asynchronously, but I can't find a way to solve this problem. I tried the async module, but I can't declare a variable between two functions and I couldn't manage to make it work.
If it cannot exist in a callback, using the setTimeout(fn, 0) trick may help ensure it is executed after the variable assignment:
var goconcat =
    fs.readdir(paths, function (err, files) {
        if (err) { console.log(err); }
        fs.unlink(paths + 'concat.css', function (err) {
            if (err) throw err;
            var list = files.map(function (files) {
                return path.join(paths, files);
            });
            setTimeout(function () {
                concat(list, paths + 'concat.css', function (err) {
                    if (err) throw err;
                });
            }, 0);
        });
    });
The problem you're having is that your concat function is being invoked before the file is deleted by unlink. You can prevent this by nesting callbacks; however, you will probably get better control flow if you use a module like async, and save yourself from dealing with Callback Hell.
Below is an example of how you can use the async module.
var fs = require('fs');
var async = require('async');

var myDir = __dirname + '/data';

async.waterfall([function (callback) {
    fs.readdir(myDir, 'utf-8', function (error, files) {
        if (error) {
            return callback(error);
        }
        return callback(null, files);
    });
}, function (files, callback) {
    // 'wx' fails with EEXIST if the file is already there, creates it otherwise
    fs.open(myDir + '/myFile', 'wx', function (error, f) {
        if (error && error.code === 'EEXIST') {
            return callback(null, 'EEXIST');
        }
        return callback(null, 'CREATE');
    });
}, function (fileStatus, callback) {
    if (fileStatus === 'EEXIST') {
        console.log('File exists. Deleting file...');
        fs.unlink(myDir + '/myFile', function (error) {
            if (error) {
                return callback(error);
            } else {
                return callback(null);
            }
        });
    } else {
        console.log('File does not exist...');
        return callback(null);
    }
}, function (callback) {
    fs.writeFile(myDir + '/myFile', "Hello World", function (err) {
        if (err) {
            return callback(err); // pass the actual error object along
        }
        return callback(null, 'File Created');
    });
}], function (error, results) {
    if (error) {
        return console.error(error);
    }
    console.log(results);
});
The waterfall function runs the tasks array of functions in series, each passing their results to the next in the array. However, if any of the tasks pass an error to their own callback, the next function is not executed, and the main callback is immediately called with the error.
This is my code in my JavaScript file:
fs.open("SensorDataValue.txt",'a' , function(err, fd) {
if(err) {
throw 'error opening file: ' + err;
}
fs.write(fd, buffer, 0, buffer.length, null, function(err) {
if (err) throw 'error writing file: ' + err;
fs.close(fd, function() {
console.log("V 14 The file was saved!" + "\r\n");
});
This keeps appending text to the SensorDataValue.txt file.
However, what I want to achieve is :
To clear the content of the text file before it appends, so that every time I run the script, it always clears the text file first before appending any data to it.
How do I go about achieving this?
var data = 'Hello World!!'
var fs = require('fs');

fs.unlink(__dirname + '/proto.js', (err) => {
    if (err) {
        console.log(err);
    }
    writeData(data)
})

function writeData(data) {
    // the writeFile callback receives only an error argument
    fs.writeFile(__dirname + '/proto.js', data, (err) => {
        if (err) {
            console.log(err);
        } else {
            console.log('Data uploaded')
        }
    })
}
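A simpler alternative, assuming the delete step isn't actually needed: fs.writeFile opens with the 'w' flag by default, which truncates the file before writing. To keep the append-style flow of the original script, you can truncate once and then append; a minimal sketch with placeholder data:

var fs = require('fs');

// truncate (or create) the file, then append from a clean slate
fs.writeFile('SensorDataValue.txt', '', (err) => {
    if (err) throw err;
    fs.appendFile('SensorDataValue.txt', 'new sensor data' + '\r\n', (err) => { // placeholder data
        if (err) throw err;
        console.log('file cleared and new data appended');
    });
});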
Let's say, for example, I want to write a Node.js program where I have two or three independent parts, like fs.readdir, fs.copy, etc., acting on different locations, but the result of all three actions is to be sent to a JSON file like this:
var fs = require('fs-extra');

var jsd = {
    "act1": false,
    "act2": false,
    "act3": false
}

fs.readdir(path1, function (err, files) {
    if (err) jsd.act1 = err;
    for (x in files) console.log(files[x]);
    jsd.act1 = true;
});

fs.copy(path2, path3, function (err) {
    if (err) jsd.act2 = err;
    jsd.act2 = true;
});

fs.remove(path4, function (err) {
    if (err) jsd.act3 = err;
    jsd.act3 = true;
});

// all three of the above actions are independent, so it makes sense that all of them are executed asynchronously.
// Now we write the jsd object to a json file; jsd's contents are dependent on the above actions though
fs.writeJson("./data.json", jsd, function (err, files) {
    if (err) return console.error(err);
});
How do I make sure that the correct data is entered into data.json, i.e. that fs.writeJson executes only after the preceding actions have completed?
I know one way is to nest all of them, i.e,
readdir() {
    copy() {
        remove() {
            writeJson();
        }
    }
}
But this may result in callback hell, so is there a better way to do this?
You can use Promises or the async module.
If you use Promises, you must first convert each callback-based function into a Promise, like this:
const readdir = function (path) {
    return new Promise((resolve, reject) => {
        fs.readdir(path, (err, files) => {
            if (err) return reject(err);
            for (x in files) console.log(files[x]);
            resolve(true);
        });
    })
}
then you can use
Promise.all([readdir(path1), copy(path2, path3), remove(path4)])
    .spread((act1, act2, act3) => { // .spread is a Bluebird feature
        return writeJson('./data.json'); // copy, remove and writeJson are assumed to be promisified the same way
    })
    .catch(e => {
        // all errors can be handled here
    })
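For completeness, copy and remove can be wrapped the same way; a sketch, assuming fs-extra's callback-style copy(src, dest, cb) and remove(path, cb):

const copy = function (src, dest) {
    return new Promise((resolve, reject) => {
        fs.copy(src, dest, (err) => {
            if (err) return reject(err);
            resolve(true);
        });
    })
}

const remove = function (path) {
    return new Promise((resolve, reject) => {
        fs.remove(path, (err) => {
            if (err) return reject(err);
            resolve(true);
        });
    })
}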
If you use the async module, you can write it like this:
async.parallel({
    act1: function (cb) {
        fs.readdir(path1, (err, files) => {
            if (err) return cb(err);
            for (x in files) console.log(files[x]);
            cb(null, true); // the first callback argument is reserved for the error
        })
    },
    act2: ...
}, (err, jsd) => { // jsd will be {act1: true, act2: ...}
    if (err) return console.error(err); // handle all the above errors here
    fs.writeJson("./data.json", jsd, function (err, files) {
        if (err) return console.error(err);
    });
})
My requirement is as follows:
I have 3 big files: big_file_1, big_file_2 and big_file_3. I want to read these 3 big files asynchronously and process the rest of my code only after all three reads have completed.
fs.readFile('big_file_1', function (err, data) {
    if (err) throw err;
    content_1 = data;
});

fs.readFile('big_file_2', function (err, data) {
    if (err) throw err;
    content_2 = data;
});

fs.readFile('big_file_3', function (err, data) {
    if (err) throw err;
    content_3 = data;
});

// Do something with content_1, content_2 and content_3
How can I achieve this in Node.js?
You can do it using the parallel function of the async library:
async.parallel([
    fs.readFile.bind(fs, 'big_file_1'),
    fs.readFile.bind(fs, 'big_file_2'),
    fs.readFile.bind(fs, 'big_file_3')
], function (err, results) {
    if (err) throw err;
    content_1 = results[0]
    content_2 = results[1]
    content_3 = results[2]
    /* TODO do some cool stuff */
})
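If you'd rather avoid the extra dependency, newer Node versions (10+) ship a promise-based fs API; a minimal sketch using fs.promises with Promise.all:

const fs = require('fs');

Promise.all([
    fs.promises.readFile('big_file_1'),
    fs.promises.readFile('big_file_2'),
    fs.promises.readFile('big_file_3')
]).then(([content_1, content_2, content_3]) => {
    // all three file contents (Buffers) are available here
}).catch((err) => {
    console.error(err);
});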
Alternatively, you can do it manually:
var steps_done = 0

fs.readFile('big_file_1', function (err, data) {
    if (err) throw err
    content_1 = data
    if (++steps_done == 3) do_next_step()
})

fs.readFile('big_file_2', function (err, data) {
    if (err) throw err
    content_2 = data
    if (++steps_done == 3) do_next_step()
})

fs.readFile('big_file_3', function (err, data) {
    if (err) throw err
    content_3 = data
    if (++steps_done == 3) do_next_step()
})