My goal is to copy a .asar file using Node.js's file system. I have a file path and need to copy the .asar file to another destination.
Example of a file path:
C:/Users/redacted/Documents/ExampleElectronApp/example.asar
I need to copy this file (archive) to another directory.
Previously I copied this with a walk-and-copy function, but it ended up creating a directory named filename.asar instead of an actual file named filename.asar. This function seems to work correctly on all other files and folders (except .asar archives).
What I have tried
1: Copying the archive using read and write streams.
var fs = require('fs');
fs.createReadStream('C:/Users/redacted/Documents/redacted/node_modules/electron-prebuilt/dist/resources/default_app.asar')
    .pipe(fs.createWriteStream('C:/Users/redacted/Desktop/test.asar'));
This ended up giving an error:
It says the file is not found, but I can assure you that following the source path does lead me to the .asar file.
2: Using the asar module to create a package.
var fs = require('fs');
var asar = require('asar');
asar.createPackage('C:/Users/redacted/Documents/redacted/node_modules/electron-prebuilt/dist/resources/default_app.asar', 'C:/Users/redacted/Desktop/test.asar', function() {
    console.log('done.');
});
While this function does log the expected 'done' message, the copy seems to have been unsuccessful: no new files or folders appear in the destination directory.
Thank you all in advance.
Maybe you can disable asar support in the fs module, like this:
process.noAsar = true
Here is the documentation: https://electronjs.org/docs/tutorial/application-packaging#treating-an-asar-archive-as-a-normal-file
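For instance, a minimal sketch, assuming the code runs inside Electron (where fs treats .asar archives as directories) and reusing the question's example path:
process.noAsar = true; // make fs treat .asar archives as plain files again

var fs = require('fs');
fs.copyFileSync(
    'C:/Users/redacted/Documents/ExampleElectronApp/example.asar',
    'C:/Users/redacted/Desktop/example.asar'
);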
The way I ended up getting this to work was by renaming the .asar file, copying it, and then renaming both files back. Here is my final source for copying .asar files:
var fs = require('fs');
var path = require('path');

var filePathSource = path.join(__dirname, 'default_app.asar');
var filePathTarget = path.join(__dirname, 'test.txt');
var filePathSourceNew = filePathSource.slice(0, -5) + ".txt";
console.log(filePathSourceNew);

fs.rename(filePathSource, filePathSourceNew, function(err) {
    if (err) {
        console.log('ERROR: ' + err);
    } else {
        var stream = fs.createReadStream(filePathSourceNew).pipe(fs.createWriteStream(filePathTarget));
        stream.on('finish', function () {
            fs.rename(filePathSourceNew, filePathSource, function(err) {
                if (err) {
                    console.log('ERROR: ' + err);
                } else {
                    fs.rename(filePathTarget, filePathTarget.slice(0, -4) + ".asar", function(err) {
                        if (err) {
                            console.log('ERROR: ' + err);
                        } else {
                            console.log("Complete Asar Copy");
                        }
                    });
                }
            });
        });
    }
});
You can use Node.js's built-in path module to build the file paths:
const fs = require('fs');
const path = require('path');

const filePathSource = path.join(__dirname, 'a', 'b', 'c', 'source.asar');
const filePathTarget = path.join(__dirname, 'a', 'b', 'c', 'target.asar');

fs.createReadStream(filePathSource).pipe(fs.createWriteStream(filePathTarget));
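As a side note, on Node 8.5+ the same copy can be done in a single call with fs.copyFile (inside Electron you would still need process.noAsar = true first). A minimal sketch:
// one-call alternative to the manual stream pipe
fs.copyFile(filePathSource, filePathTarget, (err) => {
    if (err) throw err;
    console.log('copied');
});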
I have a zip file in S3 that contains several txt files and an image.
I need to grab that file from S3 and then unzip all of the files, with their original filenames, into the tmp directory in Lambda using Node.js.
I am not super familiar with Node, and while the S3 part of getting the file works fine, I frankensteined the zip part together from the interwebs and was wondering if someone could help me get it right.
const zlib = require('zlib');
const fs = require('fs');

try {
    const s3Object = await s3
        .getObject({
            Bucket: 'mybucket',
            Key: `zip/${myzipfilename}`
        })
        .promise();
    console.log("s3 zip fetched");

    // write file to tmp
    writeFileSync(`/tmp/${myzipfilename}`, s3Object.Body);

    // unzip files
    const fileContents = fs.createReadStream(`/tmp/${myzipfilename}`);

    // I am quite sure this part is incorrect and is currently giving me an error
    const writeStream = fs.createWriteStream(`./tmp/${filename.slice(0, -3)}`);
    const unzip = zlib.createGunzip();
    fileContents.pipe(unzip).pipe(writeStream);
}
The end result within the Lambda tmp directory would be something like:
/tmp/myoriginaltxtfilename.txt
/tmp/myoriginaltxtfilename2.txt
/tmp/myoriginaltxtfilename3.txt
/tmp/myoriginalimagefilename.png
I don't need to rezip anything.
You have a couple of issues in your code. First of all, at this line:
const writeStream = fs.createWriteStream(`./tmp/${filename.slice(0, -3)}`);
filename is not defined.
Second, you're using Node's zlib to extract a .zip file that contains multiple files, which won't work: the zlib module only handles streams and buffers representing a single gzip or deflate resource, not zip archives. You could use node-stream-zip instead.
Let's say you've successfully downloaded the zip file from S3 and saved it in the /tmp directory. Streaming the files out of the archive with node-stream-zip, without unzipping the whole thing in one step, would look something like this:
const StreamZip = require('node-stream-zip');

const zip = new StreamZip({
    file: `/tmp/${myzipfilename}`,
    storeEntries: true
});

zip.on('ready', () => {
    console.log('All entries read: ' + zip.entriesCount);
});

zip.on('entry', (entry) => {
    // entries ending in '/' are directories, not files
    if ('/' === entry.name[entry.name.length - 1]) {
        console.log('[DIR]', entry.name);
        return;
    }
    console.log('[FILE]', entry.name);
    zip.stream(entry.name, (err, stream) => {
        if (err) {
            console.log('Error: ', err.toString());
            return;
        }
        stream.on('error', (err) => {
            console.log('[ERROR]', err);
        });
        stream.pipe(fs.createWriteStream(`/tmp/${entry.name}`));
    });
});
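If you don't need per-entry control, node-stream-zip can also extract the whole archive in one call. A minimal sketch with the same /tmp paths as above:
const StreamZip = require('node-stream-zip');

const zip = new StreamZip({ file: `/tmp/${myzipfilename}`, storeEntries: true });
zip.on('ready', () => {
    // passing null as the entry extracts everything in the archive
    zip.extract(null, '/tmp', (err, count) => {
        console.log(err ? 'Extract error' : `Extracted ${count} entries`);
        zip.close();
    });
});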
In a Node application, I wish to download a zip file that contains PDFs downloaded from various URLs on the internet (where, if I type the URL into a browser, it just prompts me to download a PDF). I've been using the archiver module, which is documented on GitHub at https://github.com/archiverjs/node-archiver, with official documentation at https://www.archiverjs.com/.
I'm stuck at the part where it gives the following examples for adding files to the zip file.
// append a file from stream
var file1 = __dirname + '/file1.txt';
archive.append(fs.createReadStream(file1), { name: 'file1.txt' });
// append a file from string
archive.append('string cheese!', { name: 'file2.txt' });
// append a file from buffer
var buffer3 = Buffer.from('buff it!');
archive.append(buffer3, { name: 'file3.txt' });
// append a file
archive.file('file1.txt', { name: 'file4.txt' });
// append files from a sub-directory and naming it `new-subdir` within the archive
archive.directory('subdir/', 'new-subdir');
// append files from a sub-directory, putting its contents at the root of archive
archive.directory('subdir/', false);
// append files from a glob pattern
archive.glob('subdir/*.txt');
Unfortunately, it seems just pasting URLs into the first parameter of .append or .directory doesn't work. Would anyone know how I can add downloadable files (that are online) to the zip file?
Sure: download the PDFs first using download-pdf, then append them to the archive, something like this:
var download = require('download-pdf')
var fs = require('fs');
var archiver = require('archiver');

var output = fs.createWriteStream('./example.zip');
var archive = archiver('zip', {
    zlib: { level: 9 } // Sets the compression level.
});

archive.on('error', function (err) {
    throw err;
});

// pipe archive data to the output file
archive.pipe(output);

var pdf = "http://www.consejoconsultivoemt.cl/wp-content/uploads/2018/12/Presentaci%C3%B3n-Lineamientos-Estrat%C3%A9gicos-de-Corfo.pdf"
var pdf2 = "https://www.biobiochile.cl/static/tarifas.pdf"

var options = {
    directory: "./files/",
    filename: "first.pdf"
}
var options2 = {
    directory: "./files/",
    filename: "second.pdf"
}

// download-pdf is callback-based, so only append the files and
// finalize the archive once both downloads have finished
var pending = 2;
function appendWhenReady() {
    if (--pending > 0) return;
    archive.file('./files/first.pdf', { name: 'first.pdf' });
    archive.file('./files/second.pdf', { name: 'second.pdf' });
    archive.finalize();
}

download(pdf, options, function (err) {
    if (err) throw err
    console.log("meow")
    appendWhenReady()
})
download(pdf2, options2, function (err) {
    if (err) throw err
    console.log("meow2")
    appendWhenReady()
})
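Alternatively, since archive.append accepts any readable stream, you can skip the temp files and pipe an HTTP response straight into the archive. A minimal sketch with Node's built-in https module (the URL is a placeholder, and note that https.get does not follow redirects):
const https = require('https');
const fs = require('fs');
const archiver = require('archiver');

const output = fs.createWriteStream('./example.zip');
const archive = archiver('zip', { zlib: { level: 9 } });
archive.on('error', (err) => { throw err; });
archive.pipe(output);

// hypothetical URL; any link whose response body is the PDF itself works
https.get('https://example.com/some.pdf', (res) => {
    // the response is a readable stream, so archiver consumes it directly
    archive.append(res, { name: 'some.pdf' });
    archive.finalize();
});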
I'm looking for a way to copy a folder's contents to another folder, or to replace the destination folder if it already exists while preserving its name.
Thanks for helping.
First install the fs-extra module in your project by running npm install fs-extra, then follow the steps below.
Import the following:
var fs = require('fs');
var fs_Extra = require('fs-extra');
var path = require('path');

// Here you declare your paths
var sourceDir = path.join(__dirname, "../working");
var destinationDir = path.join(__dirname, "../worked");

// if the folder doesn't exist, create it
if (!fs.existsSync(destinationDir)) {
    fs.mkdirSync(destinationDir, { recursive: true });
}

// copy folder content
fs_Extra.copy(sourceDir, destinationDir, function(error) {
    if (error) {
        throw error;
    } else {
        console.log("success!");
    }
});
NB: the source and destination folder names should not be the same.
First check whether the destination path exists and, if not, create it;
then you can use fs-extra to copy the files and subdirectories.
var fs = require('fs');
var fse = require('fs-extra');

var sourceDir = '/tmp/mydir';
var destDir = '/tmp/mynewdir';

// if the folder doesn't exist, create it
if (!fs.existsSync(destDir)) {
    fs.mkdirSync(destDir, { recursive: true });
}

// copy directory content, including subfolders
fse.copy(sourceDir, destDir, function (err) {
    if (err) {
        console.error(err);
    } else {
        console.log("success!");
    }
});
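As a side note, if you're on Node 16.7 or later, the built-in fs.cp can do the same recursive copy without fs-extra (and it creates the destination for you). A minimal sketch:
const fs = require('fs');

// recursive: true copies the directory tree, creating destDir if needed
fs.cp('/tmp/mydir', '/tmp/mynewdir', { recursive: true }, (err) => {
    if (err) {
        console.error(err);
    } else {
        console.log('success!');
    }
});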
When I run the following code, a blank file gets created with the correct name. I clearly don't want a blank file.
I know the path is correct because when I make it purposely incorrect it fails (obviously).
const path = require('path');
const fse = require('fs-extra');

const OUTPUT_PATH = 'js/libs/';
const _NODE_MODULES = 'node_modules/';
const filePath = `${_NODE_MODULES}tooltipster/dist/js/tooltipster.bundle.min.js`;

fse.copy(path.join(__dirname, filePath), path.join(__dirname, `${OUTPUT_PATH}/something.js`), err => {
    if (err) {
        console.log(err);
        process.exit(1);
    }
    console.log('Copy complete');
    process.exit(0);
});
The output of this is:
Copy complete
But the file is blank, as I previously stated. Any idea what I'm doing wrong here?
I've modified your code and checked it on my PC.
The result: http://joxi.ru/ZrJEEJh1KXw1Aj
Check out this code:
const path = require('path');
const fs = require('fs-extra');

let sourceFile = path.join(__dirname, 'node_modules', 'tooltipster/dist/js/tooltipster.bundle.min.js');
let destinationFile = path.join(__dirname, 'js/libs', 'something.js');

fs.copy(sourceFile, destinationFile, err => {
    if (err) {
        return console.error(err);
    }
    console.log('Copy complete');
});
If it fails again, make sure there is no issue outside the code: check your filesystem, since there may be open-file limits, permission problems, or no free space.
It's also possible that the source file itself is empty, which you can check with:
cat node_modules/tooltipster/dist/js/tooltipster.bundle.min.js
Your call to process.exit killed the process before the copy could finish writing to disk. You don't need to call process.exit at all; Node will exit on its own when everything is done.
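To illustrate, a minimal sketch of the corrected version, reusing the question's paths: let the callback finish and, if you want a failure status, set process.exitCode instead of calling process.exit.
const path = require('path');
const fse = require('fs-extra');

const OUTPUT_PATH = 'js/libs/';
const _NODE_MODULES = 'node_modules/';
const filePath = `${_NODE_MODULES}tooltipster/dist/js/tooltipster.bundle.min.js`;

fse.copy(path.join(__dirname, filePath), path.join(__dirname, `${OUTPUT_PATH}something.js`), err => {
    if (err) {
        console.log(err);
        process.exitCode = 1; // records failure without tearing the process down mid-write
        return;
    }
    console.log('Copy complete');
    // no process.exit(0): Node exits on its own once the copy has flushed
});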
For some reason, when I try to write a file on my localhost (Windows 7), the write stream won't open. On a Linux machine it works fine. Is there some kind of permission I need to add in Windows?
I'm already running as administrator.
Here is the current method:
// Main function to receive and process the file upload data asynchronously
var uploadFile = function(req, targetdir, callback) {
    var total_uploaded = 0,
        total_file;

    // Moves the uploaded file from the temp directory to its destination
    // and calls the callback with the JSON data that could be returned.
    var moveToDestination = function(sourcefile, targetfile) {
        moveFile(sourcefile, targetfile, function(err) {
            if (!err)
                callback({success: true});
            else
                callback({success: false, error: err});
        });
    };

    // Direct async xhr stream data upload, yeah baby.
    if (req.xhr) {
        var fname = req.header('x-file-name');
        // Be sure you can write to '/tmp/'
        var tmpfile = '/tmp/' + uuid.v1();
        total_file = req.header('content-length');

        // Open a temporary writestream
        var ws = fs.createWriteStream(tmpfile);
        ws.on('error', function(err) {
            console.log("uploadFile() - req.xhr - could not open writestream.");
            callback({success: false, error: "Sorry, could not open writestream."});
        });
        ws.on('close', function(err) {
            moveToDestination(tmpfile, targetdir + fname);
        });

        // Writing file data into the writestream
        req.on('data', function(data) {
            ws.write(data, 'binary', function() {
                total_uploaded = total_uploaded + data.length;
                var feed = {user: 'hitesh', file: fname, progress: (total_uploaded / total_file) * 100};
                require('./../../redis').broadCast(JSON.stringify(feed));
            });
        });
        req.on('end', function() {
            ws.end();
        });
    }
    // Old form-based upload
    else {
        moveToDestination(req.files.qqfile.path, targetdir + req.files.qqfile.name);
    }
};
As your code runs fine on Linux, it must be something specific to Windows.
var tmpfile = '/tmp/' + uuid.v1();
might be your problem. The folder/path structure on Windows is different. Try using the path module and change your code to:
var path = require('path');
var tmpfile = path.join('tmp', uuid.v1());
The same probably goes for your targetdir parameter.
The problem is with the directory. Unless you have a C:\tmp directory (assuming you're running node from the C drive), there is nowhere to write the tmp file.
You could either create a C:\tmp directory or modify the line
var tmpfile = '/tmp/' + uuid.v1();
to something like
var tmpfile = __dirname + '/tmp/' + uuid.v1();
Note: this requires a directory like C:\mynodeproject\tmp.
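As a side note, a cross-platform way to build the temp path is Node's built-in os module, which avoids hard-coding /tmp entirely. A minimal sketch (uuid as in the question's code):
var os = require('os');
var path = require('path');

// os.tmpdir() resolves to /tmp on Linux and to the user's temp
// folder (e.g. C:\Users\you\AppData\Local\Temp) on Windows
var tmpfile = path.join(os.tmpdir(), uuid.v1());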