Bad file descriptor, read, while extracting zip file using node-stream-zip - node.js

I have a zip file that has a folder like
1234/pic1.png
1234/pic2.png
1234/data.xlsx
I am trying to extract the spreadsheet (failing that, all files) using node-stream-zip.
const StreamZip = require('node-stream-zip');
const zip = new StreamZip({
    file: path.join(downloadsDir, fileToFind),
    storeEntries: true
});
zip.on('ready', () => {
    if (!fs.existsSync('extracted')) {
        fs.mkdirSync('extracted');
    }
    zip.extract('1234/', './extracted', err => {
        console.log(err);
    });
    zip.close();
});
This produces
EBADF: bad file descriptor, read
The extracted folder ends up containing only one of the png files. And when I follow the guide to extract just the xlsx file, it appears that the xlsx file is the one causing this error.
zip.extract('1234/data.xlsx', './extracted.xlsx', err => {
console.log(err);
});
Is the problem with the xlsx file? I can open it manually. Is it permissions-related? Node? This particular package?

Your problem is related to zip.close(). You're closing the archive on the same tick as you're invoking zip.extract(), so the file descriptor is already closed while the extraction is still trying to read from it.
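A minimal sketch of the fix, assuming the same downloadsDir / fileToFind setup as above: only close the archive once the extract callback has fired.

const StreamZip = require('node-stream-zip');
const zip = new StreamZip({
    file: path.join(downloadsDir, fileToFind),
    storeEntries: true
});
zip.on('ready', () => {
    if (!fs.existsSync('extracted')) {
        fs.mkdirSync('extracted');
    }
    zip.extract('1234/', './extracted', (err, count) => {
        console.log(err ? 'Extract error: ' + err : 'Extracted ' + count + ' entries');
        // Close the archive only after extraction has finished.
        zip.close();
    });
});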

Related

Writing data to csv file in node.js results in weird chinese characters in the resulting csv file

I am trying to write an array of objects into a csv file in node.js. I have the following code:
const fs = require('fs');
const data = [{ name: 'John' }, { name: 'Peter' }];
fs.writeFile('test.csv', data, 'utf8', function (err) {
    if (err) {
        console.log('Some error occured - file either not saved or corrupted file saved.');
    } else {
        console.log('It\'s saved!');
    }
});
However, when I open the saved csv file, it contains nothing but weird Chinese characters. Would anyone have a clue what's going on here?
PS: I am on Windows; node version is 10.15.0
The data has to be passed as a string - you can use JSON.stringify() to convert a JavaScript object (including arrays) to a string.
https://nodejs.org/api/fs.html#fs_fs_writefile_file_data_options_callback
const fs = require('fs');
const data = [{ name: 'John' }, { name: 'Peter' }];
fs.writeFile('test.csv', JSON.stringify(data), 'utf8', function (err) {
    if (err) {
        console.log('Some error occured - file either not saved or corrupted file saved.');
    } else {
        console.log('It\'s saved!');
    }
});
Note: given the data you are passing, this wouldn't really be a csv file - it's most likely a JSON file.
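If the goal really is CSV output, a minimal sketch along these lines should work (assuming every object in the array has the same keys): build a header row from the keys and one comma-separated row per object.

const fs = require('fs');
const data = [{ name: 'John' }, { name: 'Peter' }];

// Header row from the keys of the first object, then one row per object.
const header = Object.keys(data[0]).join(',');
const rows = data.map(obj => Object.values(obj).join(','));
const csv = [header, ...rows].join('\n');

fs.writeFile('test.csv', csv, 'utf8', function (err) {
    if (err) {
        return console.log(err);
    }
    console.log('It\'s saved!');
});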

Read files from input dir, apply the regex, write all files to output dir all at once

I have two folders, an input and an output folder, with many text files in the below format. How do I read all the files from the input folder, run the regex, and write all the updated files to the output folder? I am using nodejs.
Input: [$.Carpool[0].NoofSeats], [$.Carpool[1].NoofSeats]
Regex: str = str.replace(/\.[A-Z]/g, (m0) => m0.toLowerCase());
So after the regex replace, the updated text file should be:
Output: [$.carpool[0].noOfSeats], [$.carpool[1].noOfSeats]
So far I got to reading files from the directory:
const fs = require("fs");
let directory = "Input"; // Desktop/Input
let files = fs.readdirSync(directory);
console.log(files);
You want to loop through the files; assuming the contents are text files in UTF-8 format, here is an example.
Use fs.readFile to read each file after listing the directory.
Then use fs.writeFile to write a new file with the updated contents.
I use /directory/${f} for the new file's path and ${f} for the filename that was opened.
const fs = require("fs");

// Directory
let directory = "/";

// Files
let files = fs.readdirSync(directory);

// Loop through the files
files.forEach(f => {
    // Read the contents in UTF-8 format
    fs.readFile(f, 'utf8', function (err, contents) {
        if (err) { console.log(err); }
        // Output contents
        console.log(contents);
        // Perform the regex here
        contents = contents.replace(/\.[A-Z]/g, (m0) => m0.toLowerCase());
        // Write the new file to path /directory/${f}, with the updated contents
        fs.writeFile(`/directory/${f}`, contents, function (err) {
            if (err) {
                // Error writing
                return console.log(err);
            }
            console.log("The file was saved!");
        });
    });
});
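To match the Input/Output folder layout from the question, a sketch along these lines should work. The "Input" and "Output" folder names are assumptions, both folders must already exist, and the synchronous calls are just to keep the example short.

const fs = require("fs");
const path = require("path");

// Assumed folder names - adjust to your actual paths.
const inputDir = "Input";
const outputDir = "Output";

fs.readdirSync(inputDir).forEach(f => {
    const inputPath = path.join(inputDir, f);
    const outputPath = path.join(outputDir, f);

    // Read each file, lowercase the letter following every ".",
    // and write the result under the same name in the output folder.
    const contents = fs.readFileSync(inputPath, "utf8");
    const updated = contents.replace(/\.[A-Z]/g, (m0) => m0.toLowerCase());
    fs.writeFileSync(outputPath, updated);
    console.log(`Wrote ${outputPath}`);
});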

How to add a line at a specific line row to a file's content using node.js and Promises

Situation: I made some code that loops through a main directory and its subdirectories looking for .htm files. Once it finds an .htm file, it is supposed to add a line after the head tag; then it should keep looping, looking for all other .htm files in the main directory and performing the same action.
The code:
var fs = require("fs");
var path = require("path");
var mainDirectory = 'directory';

function addLineInFile() { // execute the steps needed to alter the file's content in sequential order.
    readContent() // can be found below.
        .then(lookForHead) // can be found below.
        .then(writeNewContent) // can be found below.
}

addLineInFile();

function readContent() {
    return new Promise((resolve, reject) => {
        FileContent = [];
        fs.readFile(extendedDirectoryPath, (err, data) => {
            fileContent = data.toString('utf8').split("\n");
            console.log("Read file content")
        });
        if (err) {
            reject(err);
        } else {
            resolve(FileContent);
        }
    });
}

function lookForHead(FileContent) {
    return new Promise((resolve, reject) => {
        var string = "<head>"
        for (i = 0; i < FileContent.length; i++) {
            console.log("Looking for <head>.")
            if (FileContent[i].indexOf(string) !== -1) {
                console.log("found <head>")
                FileContent.splice(i + 1, 0, 'line to be added')
            }
        }
        if (err) {
            reject(err);
        } else {
            resolve(FileContent);
        }
    });
}

function writeNewContent(FileContent) {
    return new Promise((resolve, reject) => {
        console.log("Started Writing to the file!")
        var file = fs.createWriteStream(extendedDirectoryPath);
        file.on('error', function (err) { /* error handling */ });
        FileContent.forEach(function (v) {
            file.write(v.join(', ') + '\n');
            console.log("Wrote a line to the file.")
        });
        file.end();
        if (err) {
            reject(err);
        } else {
            resolve(FileContent);
        }
    });
}
Problem: The file is written to BEFORE the content that has to be written to it is ready (take a look at the output). So writeNewContent() is executed before readContent() and lookForHead() are done producing the content to be written to the file. I've tried so many different things before this, like callback functions, and was convinced Promises would be my solution, but perhaps I'm using them incorrectly? Please keep in mind that I don't know all that much about node.js and Promises; most of my work is just copy-pasting from the internet and changing small parts of it to my liking.
Output:
Got file info successfully!
file1.htm This is an htm file!
Started Writing to the file!
Got file info successfully!
file2.htm This is an htm file!
Started Writing to the file!
Got file info successfully!
file3.htm This is an htm file!
Started Writing to the file!
Got file info successfully!
file4.htm This is an htm file!
Started Writing to the file!
Got file info successfully!
someInnerDirectory is a innerDirectory
Delving deeper!
Got file info successfully!
file5.htm This is an htm file!
Started Writing to the file!
Got file info successfully!
file6.htm This is an htm file!
Started Writing to the file!
Got file info successfully!
file7.htm This is an htm file!
Started Writing to the file!
Read file content
Read file content
Read file content
Read file content
Read file content
Read file content
Read file content
Well done on your work so far and for sticking with it. Good to see you are using recursion too. You are creating a function called addLineInFile but not executing it. I did something similar a while back, check it out.
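The core issue in readContent is that the if (err) ... resolve(...) block runs synchronously, outside the fs.readFile callback (where err isn't even defined), so the promise settles before the read has finished; writeNewContent has the same shape. A minimal sketch of readContent that only resolves once the read completes (extendedDirectoryPath is assumed to be defined elsewhere, as in the question):

function readContent() {
    return new Promise((resolve, reject) => {
        fs.readFile(extendedDirectoryPath, (err, data) => {
            if (err) {
                // Reject inside the callback, once the error is actually known.
                return reject(err);
            }
            console.log("Read file content");
            // Resolve inside the callback, once the data is really available.
            resolve(data.toString('utf8').split("\n"));
        });
    });
}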

JPEG File Encoding and writeFile in Node JS

I'm using http.request to download a JPEG file. I am then using fs.writeFile to try to write the JPEG file out to the hard drive.
None of my JPEG files can be opened, they all show an error (but they do have a file size). I have tried all of the different encodings with fs.writeFile.
What am I messing up in this process?
Here's what the working one is showing when viewing it raw:
And here is what the bad one using fs.writeFile is showing:
Figured it out, needed to use res.setEncoding('binary'); on my http.request.
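For reference, the flow that fix describes looks roughly like this (a sketch with a placeholder URL; the key line is res.setEncoding('binary'), so the chunks arrive as a binary/latin1 string that writeFile can then store with the matching encoding):

const http = require('http');
const fs = require('fs');

// Placeholder URL - substitute the real one.
const req = http.request('http://example.com/image.jpg', (res) => {
    // Without this, chunks default to Buffers and string concatenation
    // converts them as UTF-8, which corrupts the JPEG bytes.
    res.setEncoding('binary');
    let body = '';
    res.on('data', (chunk) => { body += chunk; });
    res.on('end', () => {
        fs.writeFile('out.jpg', body, { encoding: 'binary' }, (err) => {
            if (err) return console.log(err);
            console.log('Saved out.jpg');
        });
    });
});
req.end();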
Thank you - looking at the previous response, I was able to save the media correctly:
fs.writeFile(
    filepath + fileName + extension,
    mediaReceived,            // the downloaded data
    { encoding: "binary" },   // this option is what made writeFile work
    (err) => {
        if (err) {
            console.log("An error occurred while writing the media file.");
            return console.log(err);
        }
    }
);
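An alternative that avoids the 'binary' string encoding entirely is to collect the response as raw Buffer chunks and write the Buffer directly; a sketch with a placeholder URL and output path:

const http = require('http');
const fs = require('fs');

// Placeholder URL and output path - substitute your own.
http.get('http://example.com/image.jpg', (res) => {
    const chunks = [];
    // Collect the raw bytes without any string conversion.
    res.on('data', (chunk) => chunks.push(chunk));
    res.on('end', () => {
        // A Buffer needs no encoding option at all.
        fs.writeFile('out.jpg', Buffer.concat(chunks), (err) => {
            if (err) return console.log(err);
            console.log('Saved out.jpg');
        });
    });
});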

error ENOENT, open '/tmp/45e85388793de' in nodejs

I am trying to save a project and its file in GridFS. I want to save the project first and then, using the project's "_id" as metadata for the file, save the file. When I tried this I got an ENOENT, open '/tmp/45e85388793de' error. Here is my code:
newProject.save(function (err, project) {
    if (err) {
        console.log('save error', err);
    }
    console.log("project added");
    var id = project._id;
    var filepath = req.files.file.path;
    var filename = req.files.file.name;
    var writestream = gfs.createWriteStream({ filename: filename, metadata: id });
    console.log(filepath);
    fs.createReadStream(filepath)
        .on('end', function () {
        })
        .on('error', function (err) {
            console.log("error encountered" + err); // ENOENT, open error
        })
        .pipe(writestream);
});
Why am I getting this error and how can I resolve it?
ENOENT in this context means "No such file or directory." It means the filepath you are trying to read with createReadStream does not exist.
I think you are getting this error because:
Your file is saved in a temporary location.
By the time you are inside the callback function, your file has been removed from that location, so you get the "No such file" error. The path and the other variables still exist as plain js values, which is why you are still able to print them to the console.
Solution:
Above (outside) the callback function, move your file to some other permanent location using:
fs.rename(req.files.file.path, "./someKnownPath/filename");
Keep note of that location. In your callback function, use the new location as the path and try saving the file in gridfs. Once the file is saved, you may delete it from that location (/someKnownPath/filename).
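A rough sketch of that flow, reusing the names from the question (the "./someKnownPath/" directory is an assumption and must already exist; note that fs.rename takes a callback, and the GridFS write should only start once the move has finished):

var newPath = "./someKnownPath/" + req.files.file.name;

// Move the uploaded temp file to a location that won't be cleaned up.
fs.rename(req.files.file.path, newPath, function (renameErr) {
    if (renameErr) {
        return console.log('rename error', renameErr);
    }
    newProject.save(function (err, project) {
        if (err) {
            return console.log('save error', err);
        }
        // Stream the file from its new, stable location into GridFS,
        // tagging it with the project's _id as metadata.
        var writestream = gfs.createWriteStream({
            filename: req.files.file.name,
            metadata: project._id
        });
        fs.createReadStream(newPath)
            .on('error', function (err) {
                console.log("error encountered" + err);
            })
            .pipe(writestream);
    });
});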
This error was occurring for me as well, and the reason was that the temp directory was not in place. After I created it manually and gave it a try, it worked.
Now I have shifted to creating the directory on the fly through node.js itself.
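A minimal sketch of creating the directory on the fly before anything tries to write into it (the /tmp/uploads path is just an example):

const fs = require('fs');

// Create the directory (and any missing parents) if it doesn't exist yet.
// The recursive option requires Node 10.12 or later.
fs.mkdirSync('/tmp/uploads', { recursive: true });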
