I am trying to upload files to an FTP server, but only a few entries are uploaded; the rest are skipped, and no error is thrown. I don't know where exactly I am going wrong. Is it a synchronization issue or an issue with the package itself? I even tried the jsFtp package, which can also put a buffer on the server, but it did not work as expected. Below are my code and output.
const fs = require("fs");
const path = require("path");
const unzipper = require("unzipper");
const ftp = require("basic-ftp");

const client = new ftp.Client();
await client.access({ ...options });

const zip = fs
  .createReadStream(path.join(filePath, `code.zip`))
  .pipe(unzipper.Parse({ raw: true, forceStream: true }));

for await (const entry of zip) {
  await client.cd("/");
  const type = entry.type; // 'Directory' or 'File'
  const size = entry.vars.uncompressedSize; // there is also compressedSize
  let fileArray = entry.path.split("/");
  if (size > 0) {
    let fileName = fileArray.pop();
    let dir = fileArray.splice(1).join("/");
    await client.uploadFrom(entry, dir + "/" + fileName);
  }
  if (type === 'Directory') {
    let dir = fileArray.splice(1).join("/");
    await client.ensureDir(`${dir}`);
    // await client.clearWorkingDir();
  }
}
console.log("Entry Read Finished");
.gitignore
LICENSE
README.md
app/
app/bootstrap.php
app/config.php
composer.json
console.php
src/
src/SuperBlog/
src/SuperBlog/Command/
src/SuperBlog/Command/ArticleDetailComm
src/SuperBlog/Controller/
src/SuperBlog/Controller/ArticleControl
src/SuperBlog/Controller/HomeController
src/SuperBlog/Model/
src/SuperBlog/Model/Article.php
src/SuperBlog/Model/ArticleRepository.p
src/SuperBlog/Persistence/
src/SuperBlog/Persistence/InMemoryArtic
src/SuperBlog/Views/
src/SuperBlog/Views/article.twig
src/SuperBlog/Views/home.twig
src/SuperBlog/Views/layout.twig
web/
web/.htaccess
web/index.php
Creating Directory /
Uploading .gitignore
File: '' '.gitignore'
Uploading LICENSE
File: '' 'LICENSE'
Uploading README.md
File: '' 'README.md'
Creating Directory /app/
Uploading bootstrap.php
File: 'app' 'bootstrap.php'
Uploading config.php
File: 'app' 'config.php'
Entry Read Finished
Can anyone suggest what is wrong with the code? The zip itself is perfectly fine; there is no error with it.
You can simplify this using basic-ftp's uploadFromDir, which automatically creates the required folder structure and uploads the file contents. Here's a working example (tested successfully against the public FTP server at https://dlptest.com/ftp-test/):
const unzipper = require("unzipper");
const ftp = require("basic-ftp");
const fs = require('fs');
const path = require('path');

(async () => {
  try {
    await fs.createReadStream(path.resolve('./testzip.zip'))
      .pipe(unzipper.Extract({ path: './tmp-extract' }))
      .promise();
    const client = new ftp.Client();
    await client.access({
      host: "<host>",
      user: "<user>",
      password: "<password>",
    });
    await client.uploadFromDir("./tmp-extract");
    console.log("upload successful");
  } catch (err) {
    console.log(err);
  }
})();
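If the extracted files are only needed for the upload, the temporary directory can be cleaned up once the upload finishes; a small addition, assuming Node 14.14+ for fs.promises.rm:
await fs.promises.rm("./tmp-extract", { recursive: true, force: true });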
Related
I have a problem with Node.js. I have a script that downloads some zip files from an SFTP server; each zip file is a compressed CSV file. My task is to download the files, unzip them, and delete the zip files.
I already have a working script that downloads all the files from the SFTP server. Now I would like to add a function to this script that unzips all the files and keeps only the CSV files.
To do that, I started working on a local script that opens a single file directly and tries to unzip it, but I couldn't figure out how.
This is the portion of code I wrote. Once it works, I would like to move it into a helper class that I can call from my script after the SFTP download completes.
Can anyone help me understand what I am doing wrong?
const logger = require("./utils/logger");
const path = require("path");
const fs = require("fs");
const unzipper = require("unzipper");
const { LOCAL_IN_DIR_PATH } = require("./utils/consts");

const unzipAll = async (pathToSearch) => {
  console.log("unzipAll");
  try {
    const compFiles = fs.readdirSync(pathToSearch).forEach(function (file) {
      if (file.endsWith(".zip")) {
        const path = LOCAL_IN_DIR_PATH + `/${file}`;
        fs.createReadStream(path).pipe(
          unzipper.Extract({ path: path })
        );
      }
    });
  } catch (err) {
    console.log(err);
  }
};

const run = async () => {
  try {
    const LOCAL_IN_DIR_PATH = path.resolve(__dirname, "IN");
    const result = await unzipAll(LOCAL_IN_DIR_PATH);
    console.log(result);
  } catch (error) {
    console.log(error);
  }
};

run();
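A minimal sketch of what such a helper could look like, assuming unzipper's Extract interface (each extraction is awaited via the stream's promise() before the archive is deleted, and the extraction target is the scanned folder rather than the zip file's own path):

const fs = require("fs");
const path = require("path");
const unzipper = require("unzipper");

const unzipAll = async (pathToSearch) => {
  const zipFiles = fs
    .readdirSync(pathToSearch)
    .filter((file) => file.endsWith(".zip"));
  for (const file of zipFiles) {
    const zipPath = path.join(pathToSearch, file);
    // wait for the extraction to finish before touching the archive
    await fs
      .createReadStream(zipPath)
      .pipe(unzipper.Extract({ path: pathToSearch }))
      .promise();
    fs.unlinkSync(zipPath); // delete the zip, keeping only the extracted CSV
  }
};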
I was trying to watch a certain directory, and when a new file is added to that directory I want to rename the file, but it's not working. The directory-watching part works fine, but when I rename the newly added file, the name I give it gets repeated, producing the wrong name. For example, if the new name I'm assigning is thisIsName, when the file gets renamed it becomes thisIsNamethisIsNamethisIsNamethisIsName. How can I make the rename use the assigned name without any repetition? Any help is appreciated. Thanks in advance.
const fs = require("fs");
const moment = require("moment");
const chokidar = require('chokidar');

const watcher = chokidar.watch('filePath', {
  ignored: /(^|[\/\\])\../,
  persistent: true
});

function yyyymmdd() {
  var now = new moment();
  return now.format("YYYYMMDD");
}

function hhmmss() {
  var now = new moment();
  return now.format("HHmmss");
}

const log = console.log.bind(console);

// watching a certain directory for any update to it
watcher
  .on('add', path => {
    const newFileName = "filePath\\" + yyyymmdd() + hhmmss() + path;
    // trying to rename the file, but it's not working because newFileName
    // somehow gets looped, ending up with multiple iterations of the DATE
    // and TIME in the new name. An example of the result is included above
    // in the question.
    fs.renameSync(path, newFileName);
  })
  .on('change', path => {
    log(`File ${path} has been changed`);
  })
  .on('unlink', path => {
    log(`File ${path} has been removed`);
  });
I've made some small changes to your code and it worked for me with any file format (including files without an extension). Use it as you wish. The only thing you missed was the use of the path module:
const moment = require('moment');
const fs = require('fs');
const chokidar = require('chokidar');
const path = require('path');

const log = console.log.bind(console);

function formattedDate() {
  return moment().format('YYYYMMDDHHmmss');
}

// here I've used a folder with the name "folder" in the same directory as this file
const filePath = path.join(__dirname, `./folder`);

const watcher = chokidar.watch(filePath, {
  ignored: /(^|[\/\\])\../,
  persistent: true
});

watcher
  .on('add', addedFilePath => {
    const fileExt = path.extname(addedFilePath);
    const newFilePath = path.join(__dirname, `./folder/${formattedDate()}${fileExt}`);
    fs.renameSync(addedFilePath, newFilePath);
  })
  .on('change', changedFilePath => {
    log(`File ${changedFilePath} has been changed`);
  })
  .on('unlink', removingFilePath => {
    log(`File ${removingFilePath} has been removed`);
  });
So I'm trying to upload an image to Firebase Storage (a local photo from the source file directory, i.e. the same path as the .js file). The problem is that in Firebase Storage the image appears to be corrupted, and every picture is 9 bytes. Authentication and Firestore work perfectly with my Firebase configuration; this is the code:
const uploadPhoto = async () => {
  // console.log(image);
  // const uploadUri = image;
  // let filename = uploadUri.substring(uploadUri.lastIndexOf('/') + 1);
  const metadata = {
    // path: '../../firebase_image.jpg',
    contentType: 'image/jpeg'
  };
  const photog = `./photo.jpg`;
  console.log(photog);
  const storageRef = ref(storage, 'photogra.jpg'); // filename +
  uploadBytes(storageRef, photog, metadata).then((snapshot) => {
    console.log('Uploaded a blob or file!');
  });
}
I got this approach from this tutorial, and it worked fine for me:
https://www.youtube.com/watch?v=H-yXO46WDak&lc=z22ph5dhssfqufkcxacdp430segloszlmvuqlp1seplw03c010c
Try this:
const uploadImageFirebase = async () => {
  // `image` (the picked photo's URI) and `ref_con` (a Storage reference)
  // are assumed to be defined elsewhere in the component
  const nameImage = new Date().toISOString();
  const img = await fetch(image); // fetch the local image URI
  const bytes = await img.blob(); // read the actual bytes as a Blob
  try {
    await uploadBytes(ref_con, bytes);
  } catch (error) {
    console.log(error);
  } finally {
    //
  }
};
If you check the contents of your 9-byte file, it'll likely be "photo.jpg".
Since you're passing "photo.jpg" to uploadBytes, it uploads that string as the contents of the new file. It does not know how to load the file at that path.
You will either need to pass a local File or Buffer (which you'll usually get from a file picker or something like that), or load the data from the file yourself and pass the contents to uploadBytes.
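For example, a minimal sketch of the second option with the v9 web SDK (photoUri is a hypothetical placeholder for wherever the image actually lives, e.g. a picker result):

import { getStorage, ref, uploadBytes } from "firebase/storage";

const uploadLocalPhoto = async (photoUri) => {
  const response = await fetch(photoUri); // load the actual file data
  const blob = await response.blob(); // turn the response into a Blob of bytes
  const storageRef = ref(getStorage(), "photo.jpg");
  await uploadBytes(storageRef, blob, { contentType: "image/jpeg" });
};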
I'm looking for a node package to unzip password-protected zip files.
As of now I have looked here.
But the above provides functions that open the files and parse them one by one. I just need to extract the files in one place.
If I understand your question correctly, and if you are still looking for an answer:
In the same package, check out these functions:
Unzipper File Wise Extracting Options
Method specific extract
These can help you extract the files (all or selected ones) at the desired location.
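As an illustration, a minimal sketch using the package's Open interface to extract a whole archive in one go (the paths are placeholders):

const unzipper = require("unzipper");

const extractAll = async () => {
  const directory = await unzipper.Open.file("path/to/archive.zip");
  // writes every entry under the target path, preserving the folder structure
  await directory.extract({ path: "output/path" });
};

extractAll();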
If you want to unzip a password-protected zip file from a directory:
const unzipper = require("unzipper");

const unzipAndUnlockZipFile = async (filepath, password) => {
  try {
    const zipDirectory = await unzipper.Open.file(filepath); // unzip a file
    const file = zipDirectory.files[0]; // find the file you want
    // if you want to find a specific file by path:
    // const file = zipDirectory.files.find((f) => f.path === "filename");
    const extracted = await file.buffer(password); // unlock the file with the password (password is optional; do not pass it if the file is not password-protected)
    console.log(extracted.toString()); // file content
  } catch (e) {
    console.log(e);
  }
};

const zipFilePath = "./src/application.zip";
const password = "1234";

unzipAndUnlockZipFile(zipFilePath, password);
If you want to unzip and extract the file from a buffer, the solution would look like this:
const unzipper = require("unzipper");

const unzipAndUnlockZipFileFromBuffer = async (zippedFileBase64, password) => {
  try {
    const zipBuffer = Buffer.from(zippedFileBase64, "base64"); // change base64 to buffer
    const zipDirectory = await unzipper.Open.buffer(zipBuffer); // unzip a buffered file
    const file = zipDirectory.files[0]; // find the file you want
    // if you want to find a specific file by path:
    // const file = zipDirectory.files.find((f) => f.path === "filename");
    const extracted = await file.buffer(password); // unlock the file with the password (password is optional; do not pass it if the file is not password-protected)
    console.log(extracted.toString()); // file content
  } catch (e) {
    console.log(e);
  }
};

const zippedFileBase64 = "{{BASE64}}";
const password = "1234";

unzipAndUnlockZipFileFromBuffer(zippedFileBase64, password);
I am using zlib along with fstream for zipping and sending to the client. Now I need to unzip an archive (which may contain subfolders) into a folder while maintaining the folder structure. How do I do that?
There are plenty of node modules that can do this for you. One of them is node-unzip. You can extract a .zip file to a directory as simply as this:

const fs = require('fs');
const unzip = require('unzip');

fs.createReadStream('path/to/archive.zip').pipe(unzip.Extract({ path: 'output/path' }));
Further reading: https://github.com/EvanOxfeld/node-unzip
Rar is closed-source software. The only way you can do it is to install the command-line rar (rar.exe, or the Linux version of rar, which is available on most platforms) and call it by means of this:
var exec = require('child_process').exec;

exec("rar.exe x file.rar", function (error) {
  if (error) {
    // error code here
  } else {
    // success code here
  }
});
You can use the amazing module machinepack-zip (http://node-machine.org/machinepack-zip) to uncompress a zip file with a directory structure inside:
var Zip = require('machinepack-zip');

// Unzip the specified .zip file and write the decompressed files/directories
// as contents of the specified destination directory.
Zip.unzip({
  source: '/Users/mikermcneil/stuff.zip',
  destination: '/Users/mikermcneil/my-stuff',
}).exec({
  success: callbackSuccess,
  error: callbackFail,
});
To download a remote file and unzip it, you can use this code:
var fs = require('fs');
var unzip = require("unzip2");
var tar = require('tar');
var zlib = require('zlib');
var path = require('path');
var mkdirp = require('mkdirp'); // used to create directory tree
var request = require("request");
var http = require('http');
var zip = require("machinepack-zip");

// _diff is assumed to come from the surrounding context (the list of patches to fetch)
for (var i = 0; i < _diff.length; i++) {
  var req = http.get({
    host: 'localhost',
    path: '/update/patchs/' + "example.zip",
    port: 80,
    headers: { 'accept-encoding': 'gzip,deflate' }
  });
  req.on('response', (response) => {
    var output = fs.createWriteStream(__dirname + "/tmp/" + "example.zip");
    switch (response.headers['content-encoding']) {
      // or, just use zlib.createUnzip() to handle both cases
      case 'gzip':
        response.pipe(zlib.createGunzip()).pipe(unzip.Extract({ path: __dirname }));
        break;
      case 'deflate':
        response.pipe(zlib.createInflate()).pipe(unzip.Extract({ path: __dirname }));
        break;
      default:
        response.pipe(output);
        break;
    }
  });
  req.on('close', function () {
    zip.unzip({
      source: __dirname + "/tmp/" + "example.zip",
      destination: __dirname,
    }).exec({
      error: function (err) {
        console.error(err);
      },
      success: function () {
        // delete temp folder content after the uncompress finishes
      },
    });
  });
}
Note: remove any unnecessary modules.
Use the Node.js package decompress-zip. First, install it with npm:
npm install decompress-zip --save
Then you have to require it:
const DecompressZip = require('decompress-zip');
Finally, you can use it in the following way:
let unzipper = new DecompressZip(absolutePathFileZip);
The directory to extract into must be specified:
unzipper.extract({
  path: pathToExtract
});
Additionally, you can use the following for better control:
Handle errors:
unzipper.on('error', function (err) {
  console.log('event error', err);
});
Get notified when everything has been extracted:
unzipper.on('extract', function (log) {
  console.log('log:', log);
});
Notify "progress" of the decompressed files:
unzipper.on('progress', function (fileIndex, fileCount) {
  console.log('Extracted file ' + (fileIndex + 1) + ' of ' + fileCount);
});
If anyone is looking for async-await syntax:
const fs = require('fs');
const request = require('request');
const unzip = require('unzip');

// download the zip file to disk first
await new Promise(resolve =>
  request('url')
    .pipe(fs.createWriteStream('path/zipfilename'))
    .on('finish', () => {
      resolve();
    }));

// then extract it
await new Promise(resolve =>
  fs.createReadStream('path/filename')
    .pipe(unzip.Extract({ path: 'path/extractDir' }))
    .on('close', () => {
      resolve();
    }));