NodeJS - If moving multiple files fails, delete the ones that failed

I'm trying to upload multiple files with an HTTP POST, and then Node.js handles:
1. saving the files' info to the database
2. moving the files from the tmp folder to a permanent folder
3. if any file move fails, deleting that file from the tmp folder
My two issues are described in the comments within the code snippet below:
1. path.resolve isn't working
2. the iterator isn't captured within fs.rename
for (i = 0; i < req.files.length; i++) {
    const file = new fileSchema({
        _userId: req.body._userId,
        _companyId: req.body._companyId,
        _meetingId: response._id,
        originalFilename: req.files[i].originalname,
        savedFilename: req.files[i].filename,
    });
    file.save().then((response) => { }).catch(error => { console.log(error) });

    const currentPath = path.resolve(temp_folder, req.files[i].filename);
    const newPath = upload_folder + "/" + req.body._userId + "/" + req.body._companyId + "/" + response._id + "/" + req.files[i].filename;
    // 1. why doesn't path.resolve work with the inputs on the line above? I have to concat a string as in the line above?
    fs.rename(currentPath, newPath, function(err) {
        if (err) {
            console.log("Error moving files");
            try { removeTempFiles(temp_folder, req.files[i]); } // helper function which works, written elsewhere
            // 2. req.files[i] is undefined (even though req.files works) so the line above fails - i.e. the iterator isn't captured within rename?
            catch(err) { console.log(err); }
        } else {
            console.log("Successfully moved the file!");
        }
    });
}
Any help appreciated, thanks.

Change this
for (i = 0; i < req.files.length; i++) {
to this:
for (let i = 0; i < req.files.length; i++) {
The addition of let will create a separate i for each iteration of the for loop so it will stay valid inside your fs.rename() callback.
And path.join() is probably a better choice than path.resolve() for combining path segments.
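A minimal sketch of the loop with both changes applied (let for per-iteration scoping, path.join() for building the paths; removeTempFiles is the helper from the question, and String() is used because path.join() only accepts strings):
for (let i = 0; i < req.files.length; i++) {
    const currentPath = path.join(temp_folder, req.files[i].filename);
    const newPath = path.join(upload_folder, req.body._userId, req.body._companyId, String(response._id), req.files[i].filename);
    fs.rename(currentPath, newPath, function(err) {
        if (err) {
            console.log("Error moving files");
            // `i` is still valid here because it was declared with `let`
            try { removeTempFiles(temp_folder, req.files[i]); }
            catch (e) { console.log(e); }
        } else {
            console.log("Successfully moved the file!");
        }
    });
}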

Related

Node Js Accessing a JSON File

I'm still new to JSON. While I've searched the net and written a function to create an init file if none exists, I'm coming up blank on how to search and retrieve the data from the existing file, or how to add or update entries.
So far I can read the file and print the results to the console, so I know the assignment works. It's a global variable, so the data should persist outside of the readFile callback, but when I try to access it later to build the local array I'll pull data from (and use for updating later), it reads as undefined.
fs.readFile(path, 'utf8', (error, data) => {
    if (error) {
        console.log(error);
        return;
    }
    //console.log(JSON.parse(data));
    JSONData = JSON.parse(data);
    for (let i = 0; i < JSONData.length; i++) {
        console.log(i + ": [" + JSONData[i].unique + "] " + JSONData[i].name);
    }
});//fs.readFile
var playerKey = "KuroTO";
playerKey = playerKey.toLowerCase();
for (let i = 0; i < JSONData.length; i++) {
if (JSONData[i].unique.toLowerCase() == playerKey){
console.log("["+i+"] "+JSONData[i].unique.toLowerCase()+": "+playerKey);
PlayerCard1.push(JSONData[i].userid);//0
PlayerCard1.push(JSONData[i].username);//1
PlayerCard1.push(JSONData[i].unique);//2
PlayerCard1.push(JSONData[i].name);//3
PlayerCard1.push(JSONData[i].avatarurl);//4
PlayerCard1.push(JSONData[i].level);//5
PlayerCard1.push(JSONData[i].Rank);//6
PlayerCard1.push(JSONData[i].henshined);//7
PlayerCard1.push(JSONData[i].Strength);//8
PlayerCard1.push(JSONData[i].Perception);//9
PlayerCard1.push(JSONData[i].Endurance);//10
PlayerCard1.push(JSONData[i].Wisdom);//11
PlayerCard1.push(JSONData[i].Intelligence)//12;
PlayerCard1.push(JSONData[i].Luck)//13;
PlayerCard1.push(JSONData[i].Agility)//14;
PlayerCard1.push(JSONData[i].Flexability)//15;
PlayerCard1.push(JSONData[i].RatedSpeed)//16;
};//if unique matches
};//for
This is the pseudo-code concept I'm trying to do:
if (JSONData.stringify.unique == {SearchUID}) { toonname = JSONData.stringify.name; }
As I understand it you can't really append, just rewrite the file over again with new data, and I think I can figure that out on my own once I can figure out how to read the file into an array I can search like above.
To read JSON, simply require the file.
JSON:
{
    "key": "H"
}
JS:
let jsonFile = require("./path/to/json");
console.log(jsonFile.key); // H
Editing is just as simple.
let jsonFile = require("./path/to/json");
jsonFile.key = "A"
console.log(jsonFile.key) // A
Saving edits requires use of FileSystem:
const fs = require("fs")
let jsonFile = require("./path/to/json");
jsonFile.key = "A"
// first argument is the file path
// second argument is the data to write - the whole file is overwritten,
// so just JSON.stringify() the required object
// third argument is an error callback
fs.writeFile("./path/to/jsonFile", JSON.stringify(jsonFile), (err) => {
    if (err) throw new Error(err);
});
This can also be used to slightly clean up your current init function if you wanted, but that's up to you of course.
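For the lookup from the pseudo-code above, once the data is parsed (via require or JSON.parse), Array.prototype.find is one option; a rough sketch using the field names from the question's snippet:
const players = require("./path/to/json"); // assumed to be an array of player objects

const playerKey = "KuroTO".toLowerCase();
// find the first entry whose `unique` field matches the key (case-insensitively)
const player = players.find(p => p.unique.toLowerCase() === playerKey);
if (player) {
    console.log(player.name);
}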

Loop problems with Telegram bot

I'm currently developing a Telegram bot.
Here's my code:
bot.command('check', ctx => {
    console.log(ctx.from.id, ctx.chat.id)
    var files = getFilesFromDir("toSend", [".txt"])
    if (files.length > 0) {
        for (i = 0; i < files.length; i++) {
            const url = 'https://api.telegram.org/bot' + bot_token + '/sendDocument'
            let r = request(url, (err, res, body) => {
                if (err) console.log(err)
                console.log(body)
            })
            console.log(files[i])
            let f = r.form()
            f.append('chat_id', '476090013')
            f.append('document', fs.createReadStream("tosend/" + files))
        }
    } else {
        console.log('r')
    }
})
My problem is the loop; I tried the for loop but I can't figure it out.
I have a folder named tosend, and I want every file in there to be transferred to the Telegram API.
For one file it works, but if there is more than one file in the folder, in f.append('document', fs.createReadStream("tosend/" + files)) the + files takes every name instead of one.
You're trying to create a stream from an array of files: + files (which will return a comma-separated string with all the filenames).
You need to use files[i] to create a stream per file.
f.append('document', fs.createReadStream("tosend/" + files[i]))
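A rough sketch of the loop with that fix applied (and let for the loop counter), otherwise unchanged from the question's code:
for (let i = 0; i < files.length; i++) {
    const url = 'https://api.telegram.org/bot' + bot_token + '/sendDocument'
    let r = request(url, (err, res, body) => {
        if (err) console.log(err)
        console.log(body)
    })
    let f = r.form()
    f.append('chat_id', '476090013')
    // stream one file per iteration instead of the whole array
    f.append('document', fs.createReadStream("tosend/" + files[i]))
}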

How to create a service in Angular 7 that uses the FS module to list files from a directory?

I'm coding an app based on Node v10.13.0, Electron 3.0.7 & Angular 7.
A service is in charge of file processing; a function named listFiles reads and returns an array of filenames contained in a directory.
listFiles(path): Observable<string[]> {
    this.files_in_directories = [];
    return of(this.fs.readdir(path, function(err, items) {
        if (err) {
            console.log(err);
            return;
        }
        for (var i = 0; i < items.length; i++) {
            this.files_in_directories.push(items[i]);
            console.log(items[i]);
        }
        return this.files_in_directories;
    }.bind(this)));
}
My code is based on Angular Guide: https://angular.io/tutorial/toh-pt4#observable-heroservice
In the component, the service is called:
ngOnInit() {
    this.getScripts('Project');
}

getScripts(project) {
    this.generalService.listFiles(this.config.location.datastore + '\\' + project).subscribe(files => {
        this.scripts = files;
        this.cdref.detectChanges();
    });
}
When this code is executed, the files are read but never returned as an Observable; instead an undefined value is obtained (see the image):
Undefined Values Debugger
I'm a little bit lost on how to proceed from this point forward; the files in the directory are correctly listed. File Listed in console
I believe the problem comes from returning the array inside a callback. Any ideas?
@NishKal Kashyap, you are right, man. Before seeing your response I modified the code in the service to read the files in the directory synchronously, and it worked just fine. Thank you for the suggestion.
listFiles(path): Observable<string[]> {
    this.files_in_directories = [];
    this.fs.readdirSync(path).forEach(file => {
        this.files_in_directories.push(file);
    });
    return of(this.files_in_directories);
}
You identified the problem correctly; perhaps modify your code like this:
listFiles(path): Observable<string[]> {
    this.files_in_directories = [];
    this.fs.readdir(path, function(err, items) {
        if (err) {
            console.log(err);
            return;
        }
        for (var i = 0; i < items.length; i++) {
            this.files_in_directories.push(items[i]);
            console.log(items[i]);
        }
    }.bind(this));
    return of(this.files_in_directories);
}
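For reference, a minimal sketch of an alternative that keeps the read asynchronous, assuming Node 10+ (fs.promises) and RxJS's from; written here as plain Node-style JavaScript:
const fs = require('fs');
const { from } = require('rxjs');

// Wrap the promise returned by fs.promises.readdir in an Observable;
// subscribers receive the full array of filenames once the directory has been read.
function listFiles(path) {
    return from(fs.promises.readdir(path));
}

// usage: listFiles(somePath).subscribe(files => console.log(files));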

Nodejs Read very large file(~10GB), Process line by line then write to other file

I have a 10 GB log file in a particular format. I want to process this file line by line and then write the output to another file after applying some transformations. I am using Node for this operation.
Though this method works, it takes a very long time. I was able to do this within 30-45 minutes in Java, but in Node it is taking more than 160 minutes to do the same job. Following is the code.
First, the initial code which reads each line from the input:
var path = '../10GB_input_file.txt';
var output_file = '../output.txt';

function fileopsmain() {
    fs.exists(output_file, function(exists) {
        if (exists) {
            fs.unlink(output_file, function(err) {
                if (err) throw err;
                console.log('successfully deleted ' + output_file);
            });
        }
    });
    new lazy(fs.createReadStream(path, {bufferSize: 128 * 4096}))
        .lines
        .forEach(function(line) {
            var line_arr = line.toString().split(';');
            perform_line_ops(line_arr, line_arr[6], line_arr[7], line_arr[10]);
        });
}
This is the method that performs some operations on each line and passes the result to the write method, which writes it into the output file.
function perform_line_ops(line_arr, range_start, range_end, daynums) {
    var _new_lines = '';
    for (var i = 0; i < days; i++) {
        // perform some operation to modify line, pass it to print
    }
    write_line_ops(_new_lines);
}
The following method is used to write data into a new file.
function write_line_ops(line) {
    if (line != null && line != '') {
        fs.appendFileSync(output_file, line);
    }
}
I want to bring this time down to 15-20 minutes. Is it possible to do so?
Also, for the record, I'm trying this on an Intel i7 processor with 8 GB of RAM.
You can do this easily without a module. For example:
var fs = require('fs');
var inspect = require('util').inspect;

var buffer = '';
var rs = fs.createReadStream('foo.log');
rs.on('data', function(chunk) {
    var lines = (buffer + chunk).split(/\r?\n/g);
    buffer = lines.pop();
    for (var i = 0; i < lines.length; ++i) {
        // do something with `lines[i]`
        console.log('found line: ' + inspect(lines[i]));
    }
});
rs.on('end', function() {
    // optionally process `buffer` here if you want to treat leftover data without
    // a newline as a "line"
    console.log('ended on non-empty buffer: ' + inspect(buffer));
});
I can't guess where the possible bottleneck is in your code.
Can you add the library or the source code of the lazy function?
How many operations does your perform_line_ops do? (if/else, switch/case, function calls)
I've created an example based on your code. I know this doesn't directly answer your question, but maybe it helps you understand how Node handles such a case.
const fs = require('fs')
const path = require('path')

const inputFile = path.resolve(__dirname, '../input_file.txt')
const outputFile = path.resolve(__dirname, '../output_file.txt')

function bootstrap() {
    // fs.exists is deprecated
    // check if output file exists
    // https://nodejs.org/api/fs.html#fs_fs_exists_path_callback
    fs.exists(outputFile, (exists) => {
        if (exists) {
            // output file exists, delete it
            // https://nodejs.org/api/fs.html#fs_fs_unlink_path_callback
            fs.unlink(outputFile, (err) => {
                if (err) {
                    throw err
                }
                console.info(`successfully deleted: ${outputFile}`)
                checkInputFile()
            })
        } else {
            // output file doesn't exist, move on
            checkInputFile()
        }
    })
}

function checkInputFile() {
    // check if input file can be read
    // https://nodejs.org/api/fs.html#fs_fs_access_path_mode_callback
    fs.access(inputFile, fs.constants.R_OK, (err) => {
        if (err) {
            // file can't be read, throw error
            throw err
        }
        // file can be read, move on
        loadInputFile()
    })
}

function saveToOutput() {
    // create write stream
    // https://nodejs.org/api/fs.html#fs_fs_createwritestream_path_options
    const stream = fs.createWriteStream(outputFile, {
        flags: 'w'
    })
    // return wrapper function which simply writes data into the stream
    return (data) => {
        // check if the stream is writable
        if (stream.writable) {
            if (data === null) {
                stream.end()
            } else if (data instanceof Array) {
                stream.write(data.join('\n'))
            } else {
                stream.write(data)
            }
        }
    }
}

function parseLine(line, respond) {
    respond([line])
}

function loadInputFile() {
    // create write stream
    const saveOutput = saveToOutput()
    // create read stream
    // https://nodejs.org/api/fs.html#fs_fs_createreadstream_path_options
    const stream = fs.createReadStream(inputFile, {
        autoClose: true,
        encoding: 'utf8',
        flags: 'r'
    })
    let buffer = null
    stream.on('data', (chunk) => {
        // append the buffer to the current chunk
        const lines = (buffer !== null)
            ? (buffer + chunk).split('\n')
            : chunk.split('\n')
        const lineLength = lines.length
        let lineIndex = -1
        // save last line for later (last line can be incomplete)
        buffer = lines[lineLength - 1]
        // loop through all lines
        // but don't include the last line
        while (++lineIndex < lineLength - 1) {
            parseLine(lines[lineIndex], saveOutput)
        }
    })
    stream.on('end', () => {
        if (buffer !== null && buffer.length > 0) {
            // parse the last line
            parseLine(buffer, saveOutput)
        }
        // Passing null signals the end of the stream (EOF)
        saveOutput(null)
    })
}

// kick off the parsing process
bootstrap()
I know this is old but...
At a guess, appendFileSync() writes to the file system and waits for the response. Lots of small writes are generally expensive; presuming you use a BufferedWriter in Java, you might get faster results by skipping some writes.
Use one of the async writes and see if Node buffers sensibly, or write the lines to a large Node Buffer until it is full and always write a full (or nearly full) Buffer. By tuning the buffer size you could validate whether the number of writes affects performance. I suspect it would.
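A rough sketch of that buffering idea, assuming lines arrive one at a time; the write stream and the 1 MB flush threshold are arbitrary choices, and output_file is the variable from the question:
const fs = require('fs');

const out = fs.createWriteStream(output_file);
const FLUSH_SIZE = 1024 * 1024; // flush roughly every 1 MB of buffered text
let pending = '';

// collect lines in memory and only hit the file system once the buffer is large enough
function bufferedWrite(line) {
    pending += line;
    if (pending.length >= FLUSH_SIZE) {
        out.write(pending);
        pending = '';
    }
}

// call once after the last line to flush the remainder and close the stream
function finish() {
    if (pending.length > 0) {
        out.write(pending);
    }
    out.end();
}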
The execution is slow because you're not using Node's asynchronous operations. In essence, you're executing the code like this:
> read some lines
> transform
> write some lines
> repeat
while you could be doing everything at once, or at least reading and writing concurrently. Some examples in the answers here do that, but the syntax is complicated. Using scramjet you can do it in a couple of simple lines:
const {StringStream} = require('scramjet');

fs.createReadStream(path, {bufferSize: 128 * 4096})
    .pipe(new StringStream({maxParallel: 128})) // I assume this is a utf-8 file
    .split("\n")                                // split per line
    .parse((line) => line.split(';'))           // parse line
    .map(([line_arr, range_start, range_end, daynums]) => {
        return simplyReturnYourResultForTheOtherFileHere(
            line_arr, range_start, range_end, daynums
        ); // run your code, return a promise if you're doing some async work
    })
    .stringify((result) => result.toString())
    .pipe(fs.createWriteStream(output_file))
    .on("finish", () => console.log("done"))
    .on("error", (e) => console.log("error", e))
This will probably run much faster.

Access name of file being written by fs.writeFile

I'm using fs.writeFile to write a number of files to a directory. I'd like to use the name of the file being written in a callback (ex. printing "file x has been written").
How can I get the name of the file being written (x)?
Using the filename variable I passed to writeFile will not work due to writeFile being asynchronous.
Thanks!
So I think you're doing something like this (just guessing, since you're not posting code):
var files = ['file1.txt', 'file2.txt', 'file3.txt'];

for (var i = 0; i < files.length; i++) {
    var filename = files[i];
    fs.writeFile(filename, CONTENTS, function(err) {
        console.log('File written:', filename);
    });
}
That's not going to work because filename will be reused and overwritten for each iteration.
The easiest solution would be to use forEach:
files.forEach(function(filename) {
    fs.writeFile(filename, CONTENTS, function(err) {
        console.log('File written:', filename);
    });
});
That will create a newly scoped filename variable for each iteration, which won't be overwritten.
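The same per-iteration scoping can also be had with a for...of loop (or let in a classic for loop); a brief sketch, reusing the hypothetical CONTENTS from the example above:
for (const filename of files) {
    // `filename` is block-scoped, so each callback closes over its own copy
    fs.writeFile(filename, CONTENTS, function(err) {
        if (err) return console.error('Failed to write', filename, err);
        console.log('File written:', filename);
    });
}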
