I have ex1.js and ex2.js, and in ex2.js I want to access a variable that is defined in ex1.js.
I can read the file, but I want to get exactly that variable's value.
var fs = require('fs');
var readMe = fs.readFileSync('path', 'utf8');
console.log(readMe);
As long as they are both JavaScript files, this is how it is generally done:
// in ex1.js
exports.var_ex1 = 15;
// in ex2.js
var var_ex1 = require('./ex1').var_ex1;
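For completeness, a slightly fuller sketch of the same CommonJS pattern (the extra names here are illustrative, not from the question) showing that you can export several values at once via module.exports, while anything not exported stays private to the module:
// ex1.js
var secret = 42; // not exported, invisible to other modules
module.exports = {
    var_ex1: 15,
    greet: function(name) { return 'hello ' + name; } // hypothetical helper
};

// ex2.js
var ex1 = require('./ex1');
console.log(ex1.var_ex1);        // 15
console.log(ex1.greet('world')); // hello world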
Hi, I am trying to read a file, and I am having trouble with the FileReader readAsArrayBuffer function in Node.js.
var fs = require('fs');
var FileReader = require('filereader');
var forge = require('node-forge');

let p12_path = __dirname + "/file.p12";
var p12xxx = fs.readFileSync(p12_path, "utf-8");
var reader = new FileReader();
reader.readAsArrayBuffer(p12xxx); // The problem is here
reader.onloadend = function() {
    var arrayBuffer = reader.result;
    var arrayUint8 = new Uint8Array(arrayBuffer);
    var p12B64 = forge.util.binary.base64.encode(arrayUint8);
    var p12Der = forge.util.decode64(p12B64);
    var p12Asn1 = forge.asn1.fromDer(p12Der);
    ............
}
The error:
Error: cannot read as File: "0�6�\.............
You are reading a PKCS#12 (.p12) file, which is a binary format, so no text encoding should be specified. As the fs docs put it, "If the encoding option is specified then this function returns a string". Because the file is mostly binary, decoding it as UTF-8 produces invalid characters. When you exclude the encoding, readFileSync gives you a Buffer object instead, which is what you most likely want.
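A minimal sketch of that approach (assuming node-forge is installed; password handling is omitted): read the file with no encoding to get a Buffer, then hand its bytes to forge directly, skipping FileReader and the base64 round-trip entirely:
var fs = require('fs');
var forge = require('node-forge');

var p12Buffer = fs.readFileSync(__dirname + '/file.p12'); // no encoding: returns a Buffer
var p12Der = p12Buffer.toString('binary'); // forge expects a binary ("latin1") string
var p12Asn1 = forge.asn1.fromDer(p12Der);
// then continue as before, e.g. forge.pkcs12.pkcs12FromAsn1(p12Asn1, password);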
According to the npm filereader docs, the call fs.readFileSync(p12_path, "utf-8") expects the file at that path to be UTF-8 encoded text; otherwise it cannot be decoded.
The printed "0�6�\............. shows the file is obviously not UTF-8 and is therefore not readable this way.
I would like to know how exactly I can manipulate the output of my Gulp plugin so that, for example, no matter how many files are passed to the plugin, it will wrap the output with a string. Currently I have no way of knowing when the last file is done.
The super-simplified example below iterates over 3 files and creates a new file named output.js containing the string xxx three times (xxxxxxxxx).
I would like the plugin itself to wrap the contents so the output will be: +xxxxxxxxx+.
How can I do this?
Thanks!
Gulpfile
var gulp = require('gulp');
var concat = require('gulp-concat');
var foo = require('./index');
gulp.task('default', function() {
    gulp.src(['a.html', 'b.html', 'c.html'])
        .pipe(foo())
        .pipe(concat('output.js'))
        .pipe(gulp.dest('./test/output'));
});
The most basic gulp plugin (index.js):
var through2 = require('through2'),
    gutil = require('gulp-util');

var PLUGIN_NAME = 'foo';

module.exports = function(options) {
    // through2.obj(fn) is a convenience wrapper around
    // through2({ objectMode: true }, fn)
    return through2.obj(function(file, enc, callback) {
        file.contents = new Buffer('xxx');
        this.push(file);
        callback();
    });
};
I understand the files are currently simply returned modified, but what I don't understand is how to append text and return the concatenated result that I want, while staying compatible with Gulp's conventions.
The "real" plugin should actually wrap the files results with:
var foo = { FILES_CONTENT }
where FILES_CONTENT will actually be a a concatenated string of all the files:
"file_name" : "file_content",
"file_name" : "file_content",
...
I would make the following changes to your gulpfile.js:
var gulp = require('gulp');
var foo = require('./index.js');
gulp.task('default', function() {
    return gulp.src(['a.html', 'b.html', 'c.html'])
        .pipe(foo({fileName: 'output.js', varName: 'bar'}))
        .pipe(gulp.dest('./test/output'));
});
Since your foo() plugin itself will concatenate all the files, there's no need to use gulp-concat at all. Instead your plugin should accept an option fileName that provides the name of the generated file. I've also added another option varName that will provide the name of the var in the output file.
I'll assume that a.html, b.html and c.html are simple HTML files, something like this:
<h1 class="header">a</h1>
As you've already realized, you need to concat all the files in the plugin itself. That's not really difficult, however, and doesn't require a lot of code. Here's an index.js that does exactly that:
var through2 = require('through2'),
    gutil = require('gulp-util'),
    path = require('path'),
    File = require('vinyl');

var PLUGIN_NAME = 'foo';

module.exports = function(options) {
    var files = { };
    var outputFile = null;

    return through2.obj(function(file, enc, callback) {
        outputFile = outputFile || file;
        var filePath = path.relative(file.base, file.path);
        files[filePath] = file.contents.toString();
        callback();
    }, function(callback) {
        outputFile = outputFile ? outputFile.clone() : new File();
        outputFile.path = path.resolve(outputFile.base, options.fileName);
        outputFile.contents = new Buffer(
            'var ' + options.varName + ' = ' +
            JSON.stringify(files, null, 2) + ';'
        );
        this.push(outputFile);
        callback();
    });
};
Since you want to output a key/value mapping from file names to file contents, our transform function just stores both of those things in a regular JavaScript object, files. None of the input files themselves are emitted; their names and contents are just stored until we have all of them.
The only tricky part is making sure that we respect the .base property of each file as is customary for gulp plugins. This allows the user to provide a custom base folder using the base option in gulp.src().
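For example (the folder layout here is hypothetical), a user of the plugin could pass base so that the keys in the output keep their subfolder paths:
gulp.src('src/**/*.html', { base: 'src' }) // keys become e.g. "partials/a.html"
    .pipe(foo({fileName: 'output.js', varName: 'bar'}))
    .pipe(gulp.dest('./test/output'));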
Once all files have been processed, through2 calls the flush function. In there we create our output file with the provided fileName (once again making sure we respect the .base property).
Creating the output file contents is then just a matter of serializing our files object using JSON.stringify() (which automatically takes care of any escaping that has to be done).
The resulting ./test/output/output.js will then look like this:
var bar = {
  "a.html": "<h1 class=\"header\">a</h1>\n",
  "b.html": "<h1 class=\"header\">b</h1>\n",
  "c.html": "<h1 class=\"header\">c</h1>\n"
};
You should use the standard gulp pipeline technique. This means you can use the gulp-insert package to add the string xxx:
var insert = require('gulp-insert');
.pipe(insert.append('xxx')); // Appends 'xxx' to the contents of every file
You can also prepend, append, and wrap with this package, and of course it follows gulp's plugin standards.
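For instance, a quick sketch of the other helpers (the arguments here are illustrative):
.pipe(insert.prepend('+'));   // prepends '+' to the contents of every file
.pipe(insert.wrap('+', '+')); // wraps every file's contents, i.e. prepend + append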
So the full example will be:
var gulp = require('gulp');
var concat = require('gulp-concat');
var foo = require('./index');
var insert = require('gulp-insert');

gulp.task('default', function() {
    return gulp.src(['a.html', 'b.html', 'c.html'])
        .pipe(foo())
        .pipe(concat('output.js'))
        .pipe(insert.wrap('+', '+')) // wrap the concatenated result to get +xxxxxxxxx+
        .pipe(gulp.dest('./test/output'));
});
How do you get fullpath from a file descriptor in node?
var fs = require('fs')
var fd = fs.openSync('package.json', 'r')
console.log(fd) // 10
console.log(get_file_path_from_fd(fd)) // HELP
Edit: I have found this:
> fs.openSync('.', 'r')
10
> fs.readlinkSync('/proc/self/fd/10')
'/home/alfred/repos/test'
but I didn't find the /proc folder on Mac.
Considering that you're loading a file that is in the same directory as the script, you could just use the __dirname global to find the current directory.
https://nodejs.org/docs/latest/api/globals.html#globals_dirname
In fact, I prefer to load files using __dirname as part of the path for the fs module as a good practice. For example, this is from a Discord bot I have...
var tokenJSON = require( __dirname + '/json/discord_token.json');
Edit: So to put this into the answer itself: your fd variable is just a numeric file descriptor; it is completely disconnected from the path of the file it refers to. If you are being given an arbitrary file to load and you would like to have the path, we need to know more about how that file is being provided to you. When the file is given to you, there should be a path included (so the script can locate the data!), and that is what you want. If there is no path, as in your example, then the relative paths are the same and it's the current directory.
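If you control the openSync call yourself, the simplest approach (a sketch, not specific to any library) is to resolve and keep the path at the moment you open the file:
var fs = require('fs');
var path = require('path');

var filePath = path.resolve(__dirname, 'package.json'); // absolute path, kept for later
var fd = fs.openSync(filePath, 'r');
console.log(fd);       // e.g. 10
console.log(filePath); // /full/path/to/package.json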
const {execSync} = require('child_process')
const fs = require('fs')

var fd = fs.openSync('package.json', 'r')
// lsof lists the open files of this process; the last column of the data row is the path
// (requires the lsof utility, so this works on macOS/Linux but not Windows)
var fullpath = execSync(`lsof -a -p ${process.pid} -d ${fd}`).toString().split('\n')[1].split(/\s+/).pop()
console.log(fullpath) // result: /fullpath/package.json
There is a gzip archive containing JSON files. I need to take each file in turn, do something with it, and write the result into another gzip archive. I realized that I need to use the standard library's createReadStream and zlib.
Well, following the example from https://nodejs.org/api/zlib.html#zlib_examples, the following process could be used for a single gzipped file:
var fs = require('fs');
var zlib = require('zlib');

var unzip = zlib.createUnzip();
var inp = fs.createReadStream('input.json.gz');
var out = fs.createWriteStream('output.json');
inp.pipe(unzip).pipe(out);
However, if there are multiple files within a gzip, I am not sure how one would go about doing that. I could not find documentation for it, and the only way I found to unzip multiple files from a gzip file in node is if they were tar'd first. A process for unzipping a tar.gz in node can be found here. Following that example, one could do something like this:
var fs = require('fs');
var zlib = require('zlib');
var tar = require('tar-fs');

var unzip = zlib.createUnzip();
var inp = fs.createReadStream('input.tar.gz');
var out = './output'; // output directory
inp.pipe(unzip).pipe(tar.extract(out));
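Going the other way (the question also asks to write results into a new gzip), a minimal sketch with hypothetical file names uses zlib.createGzip() and the same pipe pattern:
var fs = require('fs');
var zlib = require('zlib');

var gzip = zlib.createGzip();
var inp = fs.createReadStream('output.json');     // the processed file
var out = fs.createWriteStream('output.json.gz'); // the new gzip archive
inp.pipe(gzip).pipe(out);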
I have several arrays that contain data that I would like to export, each array to a txt file, in order to be analyzed using MATLAB.
Let's say my array is:
var xPosition = [];
// some algorithm that adds content to xPosition
// TODO: export array into a txt file let's call it x_n.txt
It would be great to store one element of the array per line.
I have found a guide for the solution to my question in this post. The following code is what I ended up using:
var fs = require('fs');
var xPosition = [];
// some algorithm that adds content to xPosition
var file = fs.createWriteStream('./positions/x_n.txt');
file.on('error', function(err) { /* error handling */ });
xPosition.forEach(function(v) { file.write(v + '\n'); });
file.end();
The solution you found works, but here's how I'd have done it:
var fs = require('fs');
var xPosition = [1,2,3]; // Generate this
var fileName = './positions/x_n.txt';
fs.writeFileSync(fileName, xPosition.join('\n'));
This uses node's synchronous file writing capability, which is ideal for your purposes. You don't have to open or close file handles, etc. I'd use streams only if I had gigabytes of data to write out.
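Since you mentioned several arrays, a small extension of the same idea (the array and file names here are hypothetical) writes each one to its own txt file:
var fs = require('fs');

// sample data; in practice these come from your algorithm
var xPosition = [1, 2, 3];
var yPosition = [4, 5, 6]; // hypothetical second array

var arrays = { 'x_n.txt': xPosition, 'y_n.txt': yPosition };
Object.keys(arrays).forEach(function(name) {
    fs.writeFileSync('./positions/' + name, arrays[name].join('\n'));
});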