Cannot find module when attempting to automatically import node.js modules

I have a directory tmp that has 3 test node.js modules: mod0.js, mod1.js, mod2.js.
I am attempting to write a function that imports these three modules into an array and then returns that array. I can drop into the REPL and import each file using var x = require("./tmp/mod0"); etc. without any issue.
When I attempt to run the following function to automate this, though, I receive the error [Error: Cannot find module './tmp/mod0'].
var _importFiles = function(path, files){
    var moduleList = []
      , trimmedName;
    files.forEach(function (element, index, array){
        if (_fs.lstatSync(path + "/" + element).isFile()){
            trimmedName = element.substring(0, (element.length - 3));
            moduleList.push(require("./" + path + "/" + trimmedName));
        }
    });
    return moduleList;
};
I am passing in 'tmp' for the path parameter and the output of fs.readdirSync(path) for the files parameter.
If I check process.cwd(); within the if block, it matches that of the REPL console.
I'm trying to figure out why it works when I do it manually in the REPL but not when automated.

I modified the code slightly to this:
var _fs = require('fs');
var path = process.cwd() + '/tmp';

var _importFiles = function(path, files){
    var moduleList = [], trimmedName;
    files.forEach(function (element, index, array){
        if (_fs.lstatSync(path + "/" + element).isFile()){
            trimmedName = element.substring(0, (element.length - 3));
            moduleList.push(require("./" + path + "/" + trimmedName));
        }
    });
    return moduleList;
};

var imports = _importFiles('./tmp', _fs.readdirSync(path));
console.log(imports);
Which gives me:
$ node import.js
[ 'imported mod0 automatically', 'imported mod1 automatically' ]
The mod files simply contain module.exports = "imported mod(x) automatically";
So now the returned list is an array of the imported modules. Also, make sure your directory has read permissions (which I'm sure it does).
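For what it's worth, a variant worth considering (a sketch, not from the original post) is to build absolute paths with the path module. require() resolves a relative path against the requiring module's directory rather than the current working directory, and an absolute path sidesteps that entirely:
var _fs = require('fs');
var _path = require('path');

// Sketch: resolve everything to absolute paths so neither lstatSync nor
// require() depends on where the script is launched from. require() accepts
// an absolute path including the .js extension.
var _importFiles = function(dir, files){
    var moduleList = [];
    files.forEach(function (element){
        var fullPath = _path.resolve(dir, element);
        if (_fs.lstatSync(fullPath).isFile()){
            moduleList.push(require(fullPath));
        }
    });
    return moduleList;
};

var dir = _path.join(process.cwd(), 'tmp');
console.log(_importFiles(dir, _fs.readdirSync(dir)));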

Related

Passing a variable between pipes in Gulp 3.9.1

Using gulp 3.9.1
I am attempting to return a bunch of files and perform a task that requires a var to be passed between two pipes.
I'm using node uuid to create a v3 UUID for each file path, so I ultimately end up with a UUID for each page. I'm grabbing the file path with gulp-print.
I want to store that UUID value in a var. In the next pipe I'm using gulp-inject-string to write it into the page during the build.
Help: either I need a way to get the file path inside the gulp-inject-string pipe, or I need to pass the var between the two different pipes. If I globally set a var with a default value outside the src, it gets passed to the pipe(inject) without a problem.
Super simplified code below:
// test code
var gulp = require('gulp');
var print = require('gulp-print');
var inject = require('gulp-inject-string');
var browserSync = require('browser-sync');
var reload = browserSync.reload;
const uuidv3 = require('uuid/v3');

var uuid;

gulp.task('uuid', function() {
    return gulp.src('**/*.html')
        // create uuid
        .pipe(print(function(filepath) {
            uuid = uuidv3(filepath, uuidv3.URL);
            return "compiled: " + filepath + ' uuid: ' + uuid;
        }))
        // need to add UUIDv3 to each page
        .pipe(inject.before('</head>', '<meta name="dc.identifier" content="' + uuid + '">'))
        .pipe(gulp.dest('/prod/./'))
        .pipe(reload({ stream: true }));
});
It's worth noting that I need a cross-platform way to get the file path, starting in the root of the project and using forward slashes. gulp-print does this perfectly, starting at the root of the project and ignoring anything upstream from that point. The format of the path is important because it's one half of the equation in creating the uuid, and the UUIDs must match on Mac and PC platforms.
examples:
/index.html
/dir1/file.html
/dir1/dir2/dir3/file.html
var gulp = require('gulp');
var print = require('gulp-print');
var inject = require('gulp-inject-string');
const uuidv3 = require('uuid/v3');
var tap = require('gulp-tap');
var browserSync = require('browser-sync');
var reload = browserSync.reload;

// you can declare here
var uuid;

gulp.task('pages', function() {
    // or you can declare here
    var uuid;
    return gulp.src('**/*.html')
        // bunch of stuff happens here involving templating/minifying
        // create uuid
        .pipe(print(function(filepath) {
            // then set it here and use it further below
            // it will be available
            uuid = uuidv3(filepath, uuidv3.URL);
            return "compiled: " + filepath + ' uuid: ' + uuid;
        }))
        // need to add UUIDv3 to each page
        //.pipe(inject.before('</head>', '<meta name="dc.identifier" content="' + uuid + '">\n'))
        .pipe(tap(function(file, t) {
            return t.through(inject.before('</head>', '<meta name="dc.identifier" content="' + uuid + '">\n'));
        }))
        .pipe(gulp.dest('/prod/./'))
        .pipe(reload({ stream: true }));
});
You are just creating a variable at a higher scope that you can set and refer to later. If you need a bunch of them, create an object keyed by filepath. But I would try it first as just a simple value.
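A minimal sketch of that idea, reusing the gulp-print and uuid/v3 setup from above (the uuids object and the task name are just illustrative):
var uuids = {}; // one entry per file path

gulp.task('uuid-map', function() {
    return gulp.src('**/*.html')
        .pipe(print(function(filepath) {
            // remember the uuid for this particular file
            uuids[filepath] = uuidv3(filepath, uuidv3.URL);
            return 'compiled: ' + filepath;
        }))
        .pipe(gulp.dest('/prod/./'));
});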
I solved the problem. It was an amateur mistake: I was returning from the statement where the var was set, so the var was essentially lost. Here is the updated code, which lets the var pass through the pipes.
var gulp = require('gulp');
var print = require('gulp-print');
var replace = require('gulp-replace');
const uuidv3 = require('uuid/v3');

var uuid;

gulp.task('build', function() {
    return gulp.src('**/*.html')
        // get a cross-platform filepath and create a uuid
        .pipe(print(function(filepath) {
            uuid = uuidv3(filepath, uuidv3.URL);
        }))
        // inject uuid
        .pipe(replace('dc.identifier" content=""', function() {
            return 'dc.identifier" content="' + uuid + '"';
        }))
        .pipe(gulp.dest('/prod/./'));
});
The var uuid passes through the pipes just fine now. This code creates a UUID based on a cross-platform file path and injects it into an empty dc.identifier meta tag.

gulp - wrap plugin (which uses through2) output with string

I would like to know how exactly I can manipulate the output of my Gulp plugin so that, for example, no matter how many files are passed to the plugin, it will wrap the output with a string. Currently I cannot tell when the last file is done.
The super-simplified example below will iterate over 3 files and create a new file named output.js containing the string xxx three times (xxxxxxxxx).
I would like the plugin itself to wrap the contents so the output will be: +xxxxxxxxx+.
How can I do this?
Thanks!
Gulpfile
var gulp = require('gulp');
var concat = require('gulp-concat');
var foo = require('./index');

gulp.task('default', function() {
    return gulp.src(['a.html', 'b.html', 'c.html'])
        .pipe(foo())
        .pipe(concat('output.js'))
        .pipe(gulp.dest('./test/output'));
});
The most basic gulp plugin (index.js):
var through2 = require('through2'),
    gutil = require('gulp-util');

var PLUGIN_NAME = 'foo';

module.exports = function( options ){
    // through2.obj(fn) is a convenience wrapper around
    // through2({ objectMode: true }, fn)
    return through2.obj(function( file, enc, callback ){
        file.contents = new Buffer( 'xxx' );
        this.push(file);
        callback();
    });
};
I understand that the files are currently simply returned modified, but what I don't understand is how to append text and return the concatenated result that I want while staying within Gulp's working standards.
The "real" plugin should actually wrap the file results with:
var foo = { FILES_CONTENT }
where FILES_CONTENT will actually be a concatenated string of all the files:
"file_name" : "file_content",
"file_name" : "file_content",
...
I would make the following changes to your gulpfile.js:
var gulp = require('gulp');
var foo = require('./index.js');

gulp.task('default', function() {
    return gulp.src(['a.html', 'b.html', 'c.html'])
        .pipe(foo({fileName: 'output.js', varName: 'bar'}))
        .pipe(gulp.dest('./test/output'));
});
Since your foo() plugin itself will concatenate all the files, there's no need to use gulp-concat at all. Instead your plugin should accept an option fileName that provides the name of the generated file. I've also added another option varName that will provide the name of the var in the output file.
I'll assume that a.html, b.html and c.html are simple HTML files, something like this:
<h1 class="header">a</h1>
As you've already realized, you need to concat all the files in the plugin itself. That's not really difficult, however, and doesn't require a lot of code. Here's an index.js which does exactly that:
var through2 = require('through2'),
    gutil = require('gulp-util'),
    path = require('path'),
    File = require('vinyl');

var PLUGIN_NAME = 'foo';

module.exports = function(options) {
    var files = { };
    var outputFile = null;

    return through2.obj(function(file, enc, callback) {
        outputFile = outputFile || file;
        var filePath = path.relative(file.base, file.path);
        files[filePath] = file.contents.toString();
        callback();
    }, function(callback) {
        outputFile = outputFile ? outputFile.clone() : new File();
        outputFile.path = path.resolve(outputFile.base, options.fileName);
        outputFile.contents = new Buffer(
            'var ' + options.varName + ' = ' +
            JSON.stringify(files, null, 2) + ';'
        );
        this.push(outputFile);
        callback();
    });
};
Since you want to output a key/value mapping from file names to file contents, our transform function just stores both of those things in a regular JavaScript object, files. None of the input files themselves are emitted; their names and contents are just stored until we have all of them.
The only tricky part is making sure that we respect the .base property of each file as is customary for gulp plugins. This allows the user to provide a custom base folder using the base option in gulp.src().
Once all files have been processed through2 calls the flushFunction. In there we create our output file with the provided fileName (once again making sure we respect the .base property).
Creating the output file contents is then just a matter of serializing our files object using JSON.stringify() (which automatically takes care of any escaping that has to be done).
The resulting ./test/output/output.js will then look like this:
var bar = {
  "a.html": "<h1 class=\"header\">a</h1>\n",
  "b.html": "<h1 class=\"header\">b</h1>\n",
  "c.html": "<h1 class=\"header\">c</h1>\n"
};
You should use the standard gulp pipeline technique. This means that you can use the gulp-insert package to add the string xxx:
var insert = require('gulp-insert');
.pipe(insert.append('xxx')); // Appends 'xxx' to the contents of every file
You can also prepend, append and wrap with this package, and it of course follows the gulp standards.
So the full example will be:
var gulp = require('gulp');
var concat = require('gulp-concat');
var foo = require('./index');
var insert = require('gulp-insert');

gulp.task('default', function() {
    return gulp.src(['a.html', 'b.html', 'c.html'])
        .pipe(foo())
        .pipe(insert.append('xxx'))
        .pipe(concat('output.js'))
        .pipe(gulp.dest('./test/output'));
});

const usage inside for loop, why this behaviour?

I have a for loop in my Node.js code:
const saveDocument = co.wrap(function *(documentData, user, locale) {
    var now = moment();
    var creationDateLongString = now.format("YYYYMMDDHHmmss");
    var creationDateShortString = now.format("YYYYMMDD");
    var outputChildFolder = documentData.code + '_' + creationDateLongString + '_' + documentCounter;
    var outputFolder = config.files.incomingDocumentsDir + '/' + outputChildFolder;
    ++documentCounter;
    yield fs.mkdir(outputFolder);
    var xmlFileName = documentData.code + "-" + creationDateLongString + ".xml";
    var pdfFileName = documentData.code + "-" + creationDateLongString + ".pdf";
    const pages = [];
    for (var index = 0; index < documentData.pages.length; ++index) {
        const page = documentData.pages[index];
        var data = new Buffer(page, "base64");
        var dataEncoding = imageType(data).mime === "image/png" ? "png" : "jpg";
        var fileName = "page" + index + "." + dataEncoding;
        var targetFilePath = outputFolder + "/" + fileName;
        yield fs.writeFile(targetFilePath, data);
        pages.push(fileName);
    }
    ...
}
What I don't understand is why, in the above code, page only gets assigned once, on the first iteration, and holds that same value during the other iterations. So if I have 5 pages I end up with the data of the first page in that variable 5 times.
I am running node 4 without any special arguments or postprocessor. Simply npm run ..., which maps to node src/main/myApp.js in my package.json.
I am probably missing something simple here, but I've never seen this before when doing client-side ES6 code. The big difference of course is that the client-side code goes through Babel + Webpack, while the server-side code is run directly through node.
Small addendum: if you are wondering why the "old school" for syntax and not something along the lines of pages.forEach(..., it's because this is existing code where I just made a few minor modifications.
This will work as you are expecting in strict mode. Try adding...
"use strict";
You will only see this behavior in environments (like Node) that actually respect and enforce the keyword. Babel simply converts all let and const to var right now to provide ES5 compatibility. To demonstrate, take a look at this Babel example: you can see in the output that const has been changed to var.
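A minimal sketch of the difference (the array contents are illustrative): with "use strict" at the top, the const declared inside the loop body is a fresh block-scoped binding on every iteration, so each page gets its own value instead of silently keeping the first one.
'use strict';

const pages = ['page-a', 'page-b', 'page-c'];

for (var index = 0; index < pages.length; ++index) {
    const page = pages[index]; // new block-scoped binding each iteration under strict mode
    console.log(index, page);  // 0 'page-a', 1 'page-b', 2 'page-c'
}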

Node js fs.exists only searching file in current directory

I have set up the process.env.PATH variable and am using fs.exists(fileName). Node is not able to find the file if it is not in the current directory.
Is there some way I can configure Node to search for the file in all directories mentioned in process.env.PATH?
This isn't supported out of the box. You will have to find a suitable npm package that already does this for you or write your own code. Something along the lines of the code that is in node-whereis:
var fs = require('fs');

function whereIsMyFile(filename) {
    var pathSep = process.platform === 'win32' ? ';' : ':';
    var directories = process.env.PATH.split(pathSep);
    for (var i = 0; i < directories.length; i++) {
        var path = directories[i] + '/' + filename;
        if (fs.existsSync(path)) {
            return path;
        }
    }
    return null;
}
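For example (the file name here is only an illustration), looking up a binary that is on your PATH returns its full path, and null otherwise:
// e.g. '/usr/local/bin/node' on Mac/Linux, or null if nothing on the PATH matches
console.log(whereIsMyFile(process.platform === 'win32' ? 'node.exe' : 'node'));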

How do you upload, stream, and hash a file's contents in Node.js?

I'd like to upload files to my server and name them according to their contents. This should be simple (it is in Python), but I am having a hard time figuring out how to do it in Node.js.
I am using express and connect-form, which really just uses formidable. I also see that Node has a library called crypto that is very similar to Python's hashlib. Now I just need to understand how to stream the temp file connect-form gives me and hash it.
This is a Python/Flask(ish) implementation of what I'd like to do.
import hashlib
import os
from flask import request

def upload():
    file = request.files['file']
    hash = hashlib.sha256()
    name, ext = file.filename.rsplit('.', 1)
    try:
        for chunk in file.chunks():
            hash.update(chunk)
    finally:
        file.seek(0)
    new_name = "%s.%s" % (hash.hexdigest(), ext)
    file.save(os.path.join(UPLOAD_DIR, new_name))
I have seen a lot of these toy answers that just print out the file's name, but none that actually read and write the data.
Here it is in coffee-script in all its glory.
crypto = require 'crypto'
fs = require 'fs'

app.post '/upload', (request, response, next) ->
    request.form.complete (error, fields, files) ->
        if error
            next error
        else
            file = files.file
            [kind, extension] = file.type.split '/'
            hash = crypto.createHash 'sha256'
            stream = fs.createReadStream file.path,
                encoding: 'binary'
            stream.addListener 'data', (chunk) ->
                hash.update chunk
            stream.addListener 'close', ->
                digest = hash.digest 'hex'
                new_filename = "#{digest}.#{extension}"
                new_path = "#{UPLOAD_DIR}/#{new_filename}"
                fs.rename file.path, new_path
                response.end new_filename
Rather than hacking the hash calculation into formidable, which would likely be more efficient but far more complicated, I opted to just re-read the file from its temporary location and hash that. Then instead of pumping it like in other examples, I just renamed the temp file into its new location.
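For readers who prefer plain JavaScript, a rough sketch of the same approach (the function name and the uploadDir parameter are illustrative, not part of the original answer):
var crypto = require('crypto');
var fs = require('fs');

// Stream the temp file through a SHA-256 hash, then rename it into place.
function hashAndMove(tempPath, extension, uploadDir, callback) {
    var hash = crypto.createHash('sha256');
    var stream = fs.createReadStream(tempPath);

    stream.on('data', function(chunk) {
        hash.update(chunk);
    });
    stream.on('error', callback);
    stream.on('end', function() {
        var newFilename = hash.digest('hex') + '.' + extension;
        fs.rename(tempPath, uploadDir + '/' + newFilename, function(err) {
            callback(err, newFilename);
        });
    });
}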
You can save a file like this:
var fs = require('fs'),
    util = require('util'),
    crypto = require('crypto');

// ...

req.form.complete(function (err, fields, files) {
    // ...
    var ext = files['content[media]']['filename'].split('.');
    ext = ext[ext.length - 1];
    ext = ext.toLowerCase();

    var newFileName = req['connection']['remoteAddress'] + req['connection']['remotePort'] + Date.now();
    newFileName = crypto.createHash('md5').update(newFileName).digest("hex");
    newFileName += '.' + ext;

    var is = fs.createReadStream(files['content[media]']['path']);
    var os = fs.createWriteStream(app.set('dataDir') + '/' + newFileName);

    // copy file to public folder
    util.pump(is, os, function(error) {
        if (error) {
            console.log("Error copying file to public ... " + error);
            res.redirect("back");
            return;
        }
        else {
            // delete temp file
            fs.unlinkSync(files['content[media]']['path']);
            res.redirect('....');
        }
    });
});
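As an aside, util.pump was removed in later Node releases; the same copy step can be done with plain stream piping. A minimal sketch (the helper name and error handling are illustrative):
var fs = require('fs');

// Copy a temp upload to its final location, then remove the temp file.
function copyAndCleanup(tempPath, targetPath, done) {
    var is = fs.createReadStream(tempPath);
    var os = fs.createWriteStream(targetPath);
    is.on('error', done);
    os.on('error', done);
    os.on('close', function() {
        fs.unlink(tempPath, done);
    });
    is.pipe(os);
}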
