I've been using the unzipper library in Node.js; however, only a few files are being extracted, and on each execution the set of extracted files is different.
fs.createReadStream('test.zip').pipe(unzipper.Extract({ path: 'test' }))
  .on('finish', function () {
    console.log("finish")
  })
To fix the above issue, I had to implement the following instead of the code posted in the question:
fs.createReadStream("test.zip").on("error", log).pipe(unzipper.Parse()).on("entry", function (entry) {
var isDir = entry.type === "Directory";
var fullpath = path.join(dest, entry.path);
// console.log("fullpath ", fullpath)
var directory = isDir ? fullpath : path.dirname(fullpath);
mkdirp(directory, function (err) {
if (err)
throw err;
if (!isDir) {
entry.pipe(fs.createWriteStream(fullpath));
}
})
.on("close", function (message) {
console.log("close");
resolve("/DEST")
}).on("finish", function (message) {
console.log("finish");
resolve("/DEST")
});
});
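For context, log, dest, and resolve in the snippet above are assumed to come from the surrounding code. A minimal sketch of the enclosing Promise wrapper they would live in (names illustrative):
function extractZip(src, dest) {
  return new Promise(function (resolve, reject) {
    function log(err) { console.log(err); reject(err); }
    // ...the Parse()-based extraction above goes here,
    // with resolve, dest and log now in scope...
  });
}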
I wish to create a Node.js script that copies files, overwriting previous versions of the same files.
My code doesn't work:
const fs = require('fs');

var src = "srcPath/file.json";
var newPath = "newPath/file.json";

var callback = (result) => {
  console.log(result);
}

copy(src, newPath, callback);

function copy(src, newPath, callback) {
  fs.copyFile(src, newPath, (err) => {
    if (err) {
      callback("Error Found:", err);
    } else {
      callback("Success: " + newPath);
    }
  });
}

execute()
It will work if you take this line off, since execute() is never defined and calling it throws a ReferenceError:
execute()
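For reference, fs.copyFile overwrites an existing destination by default, so a minimal working version needs nothing beyond this (paths as in the question):
const fs = require('fs');

// fs.copyFile overwrites the destination by default; pass
// fs.constants.COPYFILE_EXCL as a third argument to fail instead.
fs.copyFile('srcPath/file.json', 'newPath/file.json', (err) => {
  if (err) throw err;
  console.log('Success: newPath/file.json');
});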
I'm trying to create a function that checks which file names exist and which don't, so I used a recursive function and a promise, but unfortunately it doesn't resolve anything when the condition is true. I tried it like this:
function checkFileName(n = 0) {
  return new Promise((resolve, reject) => {
    fs.open("./npm" + n + ".txt", 'r', function (err, res) {
      if (err) {
        resolve("./npm" + n + ".txt")
      } else {
        n++
        checkFileName(n)
      }
    })
  })
}

checkFileName().then(Name => {
  console.log(Name)
})
But it doesn't resolve anything. Where is the mistake?
Any specific reason you're doing it like that?
// Node.js program to demonstrate the
// fs.readdir() method

// Import the filesystem module
const fs = require('fs');
const path = require('path');

// List the current filenames in the
// directory with a specific extension
fs.readdir(__dirname, (err, files) => {
  if (err) {
    console.log(err);
  } else {
    console.log("Filenames with the .txt extension:");
    files.forEach(file => {
      if (path.extname(file) == ".txt")
        console.log(file);
    })
  }
})
Instead of probing one file at a time, you can read the whole directory and iterate over the returned files.
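Applied to the original problem, a sketch of this readdir-based approach could find the first free name without probing files one at a time (assuming the npmN.txt files live in __dirname):
const fs = require('fs');

fs.readdir(__dirname, (err, files) => {
  if (err) throw err;
  let n = 0;
  // Walk the indices until a name is absent from the directory listing
  while (files.includes('npm' + n + '.txt')) n++;
  console.log('First free name:', './npm' + n + '.txt');
});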
Please change checkFileName(n) to checkFileName(n).then(resolve); the recursive call creates a new promise whose result is never passed back to the outer one.
One of the solutions would be as follows:
function checkFileName(n = 0) {
  return new Promise((resolve) => {
    const filePath = "./npm" + n + ".txt";
    fs.open(filePath, 'r', function (err, res) {
      if (err) {
        resolve(filePath)
      } else {
        checkFileName(n + 1).then(resolve)
      }
    })
  })
}

checkFileName().then(Name => {
  console.log(Name)
})
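For comparison, a sketch of the same idea using fs.promises and a plain loop, which avoids the recursion entirely (Node 10+):
const fsp = require('fs').promises;

async function firstFreeName(n = 0) {
  for (;;) {
    const filePath = './npm' + n + '.txt';
    try {
      await fsp.access(filePath); // resolves if the file exists
      n++;                        // name is taken, try the next index
    } catch {
      return filePath;            // first name that does not exist
    }
  }
}

firstFreeName().then(name => console.log(name));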
I am trying to download files from Google Cloud Storage and zip them.
async function makeZippedFiles(destination, all_file_links) {
  console.log("In the zip file function");
  for (let i in all_file_links) {
    let name = all_file_links[i]['name']
    let archive = archiver('zip', {
      zlib: { level: 9 } // Sets the compression level.
    });
    archive.on('error', function (err) {
      throw err;
    });
    let output = fs.createWriteStream(__dirname + `/${name}.zip`);
    console.log("loop number", i);
    let sourceFile = all_file_links[i]['source'];
    console.log(sourceFile, name);
    let remoteFile = bucket.file(sourceFile);
    let read_file = remoteFile.createReadStream();
    await archive.append(read_file, { name: name });
    read_file
      .on('error', function (err) {
        console.log(err);
      })
      .on('response', function (response) {
        console.log("writing file", name);
        // Server connected and responded with the specified status and headers.
      })
      .on('end', function () {
        console.log("file downloaded", name);
        // The file is fully downloaded.
      })
    archive.pipe(output);
    archive.finalize();
  }
}
In the example above, I am looping through all the files and creating individual archives, i.e., if I download two files, I create two separate archives. This works.
However, if I want to zip all the files into one archive, I get the following error:
start of central directory not found; zipfile corrupt. (please
check that you have transferred or created the zipfile in the
appropriate BINARY mode and that you have compiled UnZip properly)
The code I used is:
async function makeZippedFiles(destination, all_file_links) {
  console.log("In the zip file function");
  let archive = archiver('zip', {
    zlib: { level: 9 } // Sets the compression level.
  });
  archive.on('error', function (err) {
    throw err;
  });
  let output = fs.createWriteStream(__dirname + `/${destination}.zip`);
  for (let i in all_file_links) {
    let name = all_file_links[i]['name']
    console.log("loop number", i);
    let sourceFile = all_file_links[i]['source'];
    console.log(sourceFile, name);
    let remoteFile = bucket.file(sourceFile);
    let read_file = remoteFile.createReadStream();
    await archive.append(read_file, { name: name });
    read_file
      .on('error', function (err) {
        console.log(err);
      })
      .on('response', function (response) {
        console.log("writing file", name);
        // Server connected and responded with the specified status and headers.
      })
      .on('end', function () {
        console.log("file downloaded", name);
        // The file is fully downloaded.
      })
    archive.pipe(output);
  }
  archive.finalize();
}
Found the solution. It was carelessness actually.
async function makeZippedFiles(destination, all_file_links) {
  console.log("In the zip file function");
  let archive = archiver('zip', {
    zlib: { level: 9 } // Sets the compression level.
  });
  archive.on('error', function (err) {
    throw err;
  });
  let output = fs.createWriteStream(__dirname + `/${destination}.zip`);
  archive.pipe(output);
  for (let i in all_file_links) {
    let name = all_file_links[i]['name']
    console.log("loop number", i);
    let sourceFile = all_file_links[i]['source'];
    console.log(sourceFile, name);
    let remoteFile = bucket.file(sourceFile);
    let read_file = remoteFile.createReadStream();
    await archive.append(read_file, { name: name });
    read_file
      .on('error', function (err) {
        console.log(err);
      })
      .on('response', function (response) {
        console.log("writing file", name);
        // Server connected and responded with the specified status and headers.
      })
      .on('end', function () {
        console.log("file downloaded", name);
        // The file is fully downloaded.
      })
  }
  archive.finalize();
}
I moved the archive.pipe(output) call before the for loop and it works; piping inside the loop attached the archive to the same output stream once per iteration, which corrupted the resulting zip.
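In short, the ordering that matters with archiver is: pipe once, append per file, finalize once. A stripped-down sketch, where output and files stand in for the variables above:
const archive = archiver('zip', { zlib: { level: 9 } });
archive.pipe(output);                 // attach the destination exactly once
for (const file of files) {
  archive.append(file.stream, { name: file.name });
}
archive.finalize();                   // signal that no more entries will be added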
I download a zip file from an S3 bucket, then extract it, and finally upload one file back to the S3 bucket, all in a Lambda function using Node.js. But I am getting this error:
Error: EROFS: read-only file system, open './tmp/test.zip'
"Process exited before completing request"
exports.handler = function (callback) {
  downloadZipFile(params, downloadPath, function (err) {
    if (err) {
      callback(err);
    } else {
      processZipFile(downloadPath, function (err) {
        if (err) {
          callback(err);
        } else {
          callback(null);
        }
      });
    }
  });
};

function downloadZipFile(params, downloadPath, callback) {
  const file = fs.createWriteStream(downloadPath);
  s3.getObject(params)
    .on('httpData', function (chunk) {
      file.write(chunk);
    })
    .on('success', function () {
      callback(null);
    })
    .on('error', function (err) {
      callback(err);
    })
    .on('complete', function () {
      file.end();
    })
    .send();
}

function processZipFile(filePath) {
  const stats = fs.statSync(filePath)
  const fileSizeInBytes = stats.size
  if (fileSizeInBytes > 0) {
    var srcPath = filePath;
    var destPath = "./tmp";
    targz.decompress({
      src: srcPath,
      dest: destPath
    }, function (err) {
      if (err) {
        console.log(err);
      } else {
        console.log("Done!");
        UploadFile();
      }
    });
  }
}

function UploadFile() {
  var body = fs.createReadStream('./tmp/SampleFile.txt')
  var srcfileKey = "SampleFile.txt";

  // Upload the stream
  var s3obj = new AWS.S3({ params: { Bucket: bucketName, Key: srcfileKey } });
  s3obj.upload({ Body: body }, function (err, data) {
    if (err) {
      console.log("An error occurred", err);
    }
    console.log("Uploaded the file at", data.Location);
  })
}
You need to change the file path to just /tmp instead of ./tmp. Lambda only allows you to write to the /tmp directory.
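Applied to the code above, that means the paths become (a sketch; /tmp is the only writable location in a Lambda's filesystem):
// '/tmp' is absolute; './tmp' resolves relative to the read-only code directory
const downloadPath = '/tmp/test.zip';
const destPath = '/tmp';
var body = fs.createReadStream('/tmp/SampleFile.txt');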
I'm trying to move an uploaded file from /tmp to the home directory using Node.js/Express:
fs.rename('/tmp/xxxxx', '/home/user/xxxxx', function (err) {
  if (err) res.json(err);
  console.log('done renaming');
});
But it didn't work, and no error was encountered. It does work when the new path is also inside /tmp.
I'm using Ubuntu; home is on a different partition. Any fix?
Thanks
Yes, fs.rename does not move files between two different disks/partitions. This is the correct behaviour: fs.rename provides the same functionality as rename(2) in Linux, which fails with EXDEV when the source and destination are on different filesystems.
To get what you want, you would have to do something like this:
var source = fs.createReadStream('/path/to/source');
var dest = fs.createWriteStream('/path/to/dest');
source.pipe(dest);
source.on('end', function() { /* copied */ });
source.on('error', function(err) { /* error */ });
Another way is to use fs.readFile and fs.writeFile; an fs.unlink in the callback then removes the temp file from the tmp directory. Note that this buffers the whole file in memory, so the stream version above is preferable for large files.
var oldPath = req.files.file.path;
var newPath = ...;

fs.readFile(oldPath, function (err, data) {
  if (err) throw err;
  fs.writeFile(newPath, data, function (err) {
    if (err) throw err;
    fs.unlink(oldPath, function (err) {
      if (err) throw err;
      res.send("File uploaded to: " + newPath);
    });
  });
});
Updated ES6 solution ready to use with promises and async/await:
function moveFile(from, to) {
  const source = fs.createReadStream(from);
  const dest = fs.createWriteStream(to);

  return new Promise((resolve, reject) => {
    source.on('end', resolve);
    source.on('error', reject);
    source.pipe(dest);
  });
}
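A usage sketch inside an async function (paths illustrative). Note that this copies rather than renames, so add an fs.unlink of the source afterwards if you need true move semantics:
async function handleUpload() {
  // moveFile is the function defined above
  await moveFile('/tmp/upload.bin', '/home/user/upload.bin');
  console.log('moved');
}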
This example is taken from Node.js in Action.
A move() function that renames, if possible, or falls back to copying:
var fs = require('fs');

module.exports = function move(oldPath, newPath, callback) {
  fs.rename(oldPath, newPath, function (err) {
    if (err) {
      if (err.code === 'EXDEV') {
        copy();
      } else {
        callback(err);
      }
      return;
    }
    callback();
  });

  function copy() {
    var readStream = fs.createReadStream(oldPath);
    var writeStream = fs.createWriteStream(newPath);

    readStream.on('error', callback);
    writeStream.on('error', callback);

    readStream.on('close', function () {
      // Remove the original once the copy has fully completed
      fs.unlink(oldPath, callback);
    });

    readStream.pipe(writeStream);
  }
}
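A usage sketch, assuming the module above is saved as move.js (paths illustrative):
const move = require('./move');

move('/tmp/upload.bin', '/home/user/upload.bin', function (err) {
  if (err) throw err;
  console.log('moved');
});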