Obfuscate RequireJS module name [duplicate] - node.js

This question already has an answer here:
Replace module ids with fake names
(1 answer)
Closed 7 years ago.
My project layout is:
-bin\
    -out.closure.js // compiled from out.js using the Closure Compiler
    -out.js         // the RequireJS output file, no minification done
-src\
    -lib\
        -library.js
    -main.js
    -somefile.js
Now, when I use RequireJS to combine my project into a single file, is there a way to mangle the module names? For example, in main.js, instead of:
require(['somefile'], function(SomeFile){
//blah
});
I'll have
require(['a6fa7'], function(SomeFile){
//blah
});
Since I am using the Closure Compiler to obfuscate everything, the only things not being mangled are the module names, and I want to mangle those as well.
I looked at the https://github.com/stevensacks/grunt-requirejs-obfuscate plugin but it's not working and I'm not sure if that plugin does what I want.

Edit 1: I encourage using AMDclean instead; there may be no reason to keep the footprint AMD produces in an optimized build. If you want to keep it, the following is the workaround I made:
Use the onBuildRead and onBuildWrite hooks; as the documentation states:
onBuildWrite A function that will be called for every write to an optimized bundle of modules. This allows transforms of the content before serialization.
"replaceModuleNames": [],
//executed on every file read
onBuildRead: function(moduleName, path, contents){
this["replaceModuleNames"].push({
"name": moduleName,
"with": makeid()
});
//Always return a value.
return contents;
},
onBuildWrite: function (moduleName, path, contents) {
console.log("moduleName: " + moduleName + " ,path: " + path + " ,contents: ... ");
var text = contents;
RegExp.escape = function(text) {
return text.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&");
};
this["replaceModuleNames"].forEach(function(element){
var regE = new RegExp('(?:define\\(|require\\().*(?:"|\')('+RegExp.escape(element["name"])+')(?:"|\')');
text = text.replace(regE, function(a, b){
return a.replace(b, element["with"]);
});
console.log("moduleName: "+moduleName+ " replaceWith: " + element["with"] + " result: " + text);
});
//Always return a value.
return text;
}
Place it in build.js; you have to implement the makeid method. I tested it, but it might fail, so don't rely on it for production.
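For reference, makeid is not part of the r.js API; a possible implementation (purely illustrative) would be:
//Illustrative only: build a short random id that starts with a letter,
//so it remains a valid module name. Collisions are not handled here.
function makeid() {
    var letters = "abcdefghijklmnopqrstuvwxyz";
    var chars = letters + "0123456789";
    var id = letters.charAt(Math.floor(Math.random() * letters.length));
    for (var i = 0; i < 5; i++) {
        id += chars.charAt(Math.floor(Math.random() * chars.length));
    }
    return id;
}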
Edit 2: text.replace(element["name"], element["with"]); may cause inconsistent code, so I replaced it with regexp groups. I am not experienced with regex, so it is open to improvements.

Related

Node.js fs.writeFile() not creating new files?

I need to create many .json files for the system I am trying to develop. To do this, I ran a for loop over the file names I needed, then used fs.writeFileSync('filename.json', [data]).
However, when trying to open these later, and when I try to find them in the project folder, I cannot find them anywhere.
I have tried writing a simpler file name that should have appeared in the same directory as my script, but that was fruitless as well. To my understanding, even if my file name wasn't what I expected it to be, I should get at least something, somewhere; however, I end up with nothing changed.
My current code looks like this:
function addEmptyDeparture(date) {
    fs.readFileSync(
        __dirname + '/reservations/templates/wkend_dep_template.json',
        (err, data) => {
            if (err) throw err
            fs.writeFileSync(
                getDepartureFileName(date),
                data
            )
        }
    )
}
function getDepartureFileName(date) {
    return __dirname + '/reservations/' +
        date.getFullYear() +
        '/departing/' +
        (date.getMonth() + 1).toString().padStart(2, "0") +
        date.getDate().toString().padStart(2, "0") +
        '.json'
}
Where data is the JSON object returned from fs.readFileSync() and is immediately written into fs.writeFileSync(). I don't think I need to stringify this, since it's already a JSON object, but I may be wrong.
The only reason I think it's not working at all (as opposed to simply not showing up in my project) is that, in a later part of the code, we have this:
fs.readFileSync(
    getDepartureFileName(date)
)
.toString()
which is where I get an error for not having a file by that name.
It is also worth noting that date is a valid date object, as I was able to test that part in a fiddle.
Is there something I'm misunderstanding in the effects of fs.writeFile(), or is this not the best way to write .json files for use on a server?
You probably are forgetting to stringify the data:
fs.writeFileSync('x.json', JSON.stringify({id: 1}))
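As an aside, a minimal sketch of the question's helper using the synchronous API throughout (fs.readFileSync returns the file contents directly and never calls a callback; the (err, data) callback style belongs to the asynchronous fs.readFile):
const fs = require('fs');

function addEmptyDeparture(date) {
    //readFileSync returns a Buffer directly; no callback is involved
    const template = fs.readFileSync(
        __dirname + '/reservations/templates/wkend_dep_template.json'
    );
    fs.writeFileSync(getDepartureFileName(date), template);
}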
I have tried to create a similar case using a demo where writeFileSync() creates different files and adds JSON data to them, using a for loop. In my case it works; each time a new file name is created. Here is my GitHub demo for the same:
var fs = require('fs');

// Write four files, each containing a small JSON payload
for (let i = 0; i < 4; i++) {
    var readMe = JSON.stringify({ "data": i });
    fs.writeFileSync('writeMe' + i + '.json', readMe, "utf8");
}
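To double-check the output, the generated files can be read straight back (a small addition of mine, assuming the script runs from the directory the files were written to):
//Read one of the generated files back and parse it
var check = JSON.parse(fs.readFileSync('writeMe0.json', 'utf8'));
console.log(check); // { data: 0 }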
Let me know if this is what you have been trying at your end.

How to use underscore.string globally in a web page (with requireJS)

The documentation says that once underscore.string is included in my application, the library is available via the s or _s global variables (http://epeli.github.io/underscore.string/#others), or within the underscore properties _.str or _.string.
This simple code should then work properly (with config paths correctly set):
require(['underscore','underscore.string'], function(){
    console.log(s('test').capitalize().value());
});
But it does not work at all. Here is a simple jsfiddle example. I probably did something wrong, but I can't figure out what.
https://jsfiddle.net/mvenrtgy/10/
The relevant test code is:
require.config({
    paths: {
        'underscore': 'https://cdnjs.cloudflare.com/ajax/libs/underscore.js/1.8.3/underscore-min',
        'underscore.string': 'https://cdnjs.cloudflare.com/ajax/libs/underscore.string/3.3.4/underscore.string.min'
    }
});
require(['underscore','underscore.string'], function(){
    var t = 'This is a simple text I would like to SLUGIFY using unsercore.string';
    // underscore is there!
    console.log(typeof _);
    // now check where underscore.string is...
    console.log(typeof _.str);
    console.log(typeof _.string);
    console.log(typeof s);
    console.log(typeof _s);
    document.getElementsByTagName('body')[0].innerHTML = '<p>Sample text : ' + t + '</p>';
    // this line will cause an error
    document.getElementsByTagName('body')[0].innerHTML += '<p><em>Slugified</em> text : ' + s(t).slugify().value() + '</p>';
});
underscore.string is a well-behaved AMD library so it does not pollute the global space. You should change your require call so that it gets the parameters that correspond to the modules you are loading:
require(['underscore','underscore.string'], function(_, s){
By just making this change to your code, I get something that works. The 2nd line of your test comes up as:
Slugified text : this-is-a-simple-text-i-would-like-to-slugify-using-unsercore-string
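Concretely, the corrected require call from the fiddle looks roughly like this (same config paths as in the question):
require(['underscore', 'underscore.string'], function (_, s) {
    var t = 'This is a simple text I would like to SLUGIFY using unsercore.string';
    //s now refers to the underscore.string module passed in as a parameter
    document.getElementsByTagName('body')[0].innerHTML +=
        '<p><em>Slugified</em> text : ' + s(t).slugify().value() + '</p>';
});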
Also, while underscore.string started as an underscore extension, it has grown beyond that, so you cannot access it through the _ global that underscore leaks into the global space, unless you take care of mixing it in with underscore. The documentation says you can do this:
_.mixin(s.exports());
But the documentation recommends against it, because underscore.string will interfere with some of the basic functions that underscore exports. Use at your own risk.
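If you do want the helpers on _, a minimal sketch of that mixin approach (based on the documentation line quoted above) would be:
require(['underscore', 'underscore.string'], function (_, s) {
    //mixes the string helpers into _, at the risk of overriding a few underscore functions
    _.mixin(s.exports());
    console.log(_.slugify('Some sample text')); // "some-sample-text"
});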

Meteor/Node writeFile crashes server

I have the following code:
Meteor.methods({
    saveFile: function (blob, name, path, encoding) {
        var path = cleanPath(path), fs = __meteor_bootstrap__.require('fs'),
            name = cleanName(name || 'file'), encoding = encoding || 'binary',
            chroot = Meteor.chroot || 'public';
        // Clean up the path. Remove any initial and final '/' -we prefix them-,
        // any sort of attempt to go to the parent directory '..' and any empty directories in
        // between '/////' - which may happen after removing '..'
        path = chroot + (path ? '/' + path + '/' : '/');

        // TODO Add file existence checks, etc...
        fs.writeFile(path + name, blob, encoding, function (err) {
            if (err) {
                throw (new Meteor.Error(500, 'Failed to save file.', err));
            } else {
                console.log('The file ' + name + ' (' + encoding + ') was saved to ' + path);
            }
        });

        function cleanPath(str) {
            if (str) {
                return str.replace(/\.\./g, '').replace(/\/+/g, '').
                    replace(/^\/+/, '').replace(/\/+$/, '');
            }
        }

        function cleanName(str) {
            return str.replace(/\.\./g, '').replace(/\//g, '');
        }
    }
});
Which I took from this project
https://gist.github.com/dariocravero/3922137
The code works fine and it saves the file; however, it repeats the call several times, and each time it causes Meteor to reset (Windows, version 0.5.4). The F12 console ends up full of repeated 503 errors (the original screenshot is not reproduced here), and the Meteor console loops over the startup code each time the 503 happens and repeats the console logs in the saveFile function.
Furthermore, in the target directory the image thumbnail keeps displaying, then shows as broken, then as a valid thumbnail again, as if fs is writing it multiple times.
Here is the code that calls the function:
"click .savePhoto":function(e, template){
e.preventDefault();
var MAX_WIDTH = 400;
var MAX_HEIGHT = 300;
var id = e.srcElement.id;
var item = Session.get("employeeItem");
var file = template.find('input[name='+id+']').files[0];
// $(template).append("Loading...");
var dataURL = '/.bgimages/'+file.name;
Meteor.saveFile(file, file.name, "/.bgimages/", function(){
if(id=="goodPhoto"){
EmployeeCollection.update(item._id, { $set: { good_photo: dataURL }});
}else{
EmployeeCollection.update(item._id, { $set: { bad_photo: dataURL }});
}
// Update an image on the page with the data
$(template.find('img.'+id)).delay(1000).attr('src', dataURL);
});
},
What's causing the server to reset?
My guess would be that since Meteor has built-in automatic directory scanning to detect file changes, in order to auto-relaunch the application on the newest code base, the file you are creating is actually causing the server reset.
Meteor doesn't scan directories beginning with a dot (so called "hidden" directories) such as .git for example, so you could use this behaviour to your advantage by setting the path of your files to a .directory of your own.
You should also consider using writeFileSync, insofar as Meteor methods are intended to run synchronously (inside Node fibers), contrary to the usual Node way of asynchronous calls. In this code it's no big deal, but, for example, you couldn't use any Meteor mechanics inside the writeFile callback:
asynchronousCall(function (error, result) {
    if (error) {
        // handle error
    }
    else {
        // do something with result
        Collection.update(id, result); // error! Meteor code must run inside fiber
    }
});

var result = synchronousCall();
Collection.update(id, result); // good to go!
Of course there is a way to turn any asynchronous call into a synchronous one using fibers/future, but that's beyond the scope of this question; I recommend watching this EventedMind episode on node Futures to understand this specific area.
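For completeness, a rough sketch of that fibers/future wrapping, not taken from the original answer (the require calls and helper name are my own assumptions, and how you obtain fs and Future depends on your Meteor version):
var fs = Npm.require('fs');               // in older Meteor: __meteor_bootstrap__.require('fs')
var Future = Npm.require('fibers/future');

function writeFileFiber(path, data, encoding) {
    var future = new Future();
    fs.writeFile(path, data, encoding, function (err) {
        if (err) future.throw(err);
        else future.return(true);
    });
    return future.wait(); // blocks this fiber only, not the whole event loop
}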

requiring optional javascript files with requirejs (e.g. require a locale specific file)

A few of the jquery plugins we rely on offer the ability to include locale/culture specific files to make non-en-us users feel more at home with their functionality (for example, jquery-globalize and bootstrap-datapicker).
The old school way of achieving this was as follows (where 'en-AU' is determined on the fly, sometimes resulting in a 404 for missing cultures):
<script type="text/javascript" src="/js/globalize/globalize.js"></script>
<script type="text/javascript" src="/js/globalize/cultures/globalize.culture.en-AU.js"></script>
Is there a recommended way of achieving this with requirejs (note that globalize is included as a shim)?
Here's my first attempt; I'm not yet sure how the RequireJS optimizer will handle it...
Globalize = require("globalize");
...
locale = module.config().locale;
if (locale != null) {
    require(["globalize/globalize.culture." + locale], function () {
        logger.debug("Loaded locale '" + locale + "'");
        Globalize.culture(locale);
    }, function () {
        logger.debug("Unable to load locale '" + locale + "'");
    });
}
Edit: the optimizer handles it fine, but the solution doesn't really work: because the culture file is loaded asynchronously, it's possible that the app has already used Globalize before the culture is set.
Kendo does something similar in their code.
This is a slight modification of their technique. This particular style allows the modules to load either through require or direct script inclusion.
I modified it slightly to set up the prereqs. On the downside, this code will no longer work through the r.js optimizer.
var myLocalPrereqs = ['file_english.min'];

("function" == typeof define && define.amd ? define : function (preReqs, func) {
    return func()
})(myLocalPrereqs, function () {
    (function () {
        // Your module code goes here...
    })()
});
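In other words, when the script is loaded through RequireJS, the expression evaluates to define, so the prereqs (here the locale file) are loaded before the factory runs; when the script is included via a plain script tag, define is absent, the fallback function is used, and the factory executes immediately.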

Can I load multiple files with one require statement?

Maybe this question is a little silly, but is it possible to load multiple .js files with one require statement? Like this:
var mylib = require('./lib/mylibfiles');
and use:
mylib.foo(); //return "hello from one"
mylib.bar(); //return "hello from two"
And the folder mylibfiles will have two files:
One.js
exports.foo= function(){return "hello from one";}
Two.js
exports.bar= function(){return "hello from two";}
I was thinking of putting a package.json in the folder that says to load all the files, but I don't know how. Another approach I was thinking of is to have an index.js that exports everything again, but then I would be duplicating work.
Thanks!!
P.S.: I'm working with nodejs v0.611 on a Windows 7 machine.
First of all, using require does not duplicate anything. It loads the module and caches it, so calling require again will get it from memory (thus you can modify the module on the fly without touching its source code - this is sometimes desirable, for example when you want to store a db connection inside a module).
Also package.json does not load anything and does not interact with your app at all. It is only used for npm.
Now, you cannot require multiple modules at once. For example, what would happen if both One.js and Two.js defined a function with the same name? There are more problems.
But what you can do is write an additional file, say modules.js, with the following content:
module.exports = {
    one: require('./one.js'),
    two: require('./two.js'),
    /* some other modules you want */
}
and then you can simply use
var modules = require('./modules.js');
modules.one.foo();
modules.two.bar();
I have a snippet of code that requires more than one module, but it doesn't clump them together as your post suggests. However, that can be overcome with a trick that I found.
function requireMany () {
    return Array.prototype.slice.call(arguments).map(function (value) {
        try {
            return require(value)
        }
        catch (event) {
            return console.log(event)
        }
    })
}
And you use it as such
requireMany("fs", "socket.io", "path")
Which will return
[ fs {}, socketio {}, path {} ]
If a module is not found, an error will be logged to the console, but it won't break the program. The failed module will appear in the array as undefined, so the array will not be shorter because one of the modules failed to load.
Then you can bind each of those array elements to a variable name, like so:
var [fs, socketio, path] = requireMany("fs", "socket.io", "path")
It essentially works like an object, but binds the values to variable names in the enclosing scope. So, in your case, you could do:
var [foo, bar] = requireMany("./foo.js", "./bar.js")
foo() //return "hello from one"
bar() //return "hello from two"
And if you do want it to break the program on error, just use this modified version, which is smaller:
function requireMany () {
    return Array.prototype.slice.call(arguments).map(require)
}
Yes, you may require a folder as a module, according to the node docs. Let's say you want to require() a folder called ./mypack/.
Inside ./mypack/, create a package.json file with the name of the folder and a main javascript file with the same name, inside a ./lib/ directory.
{
    "name": "mypack",
    "main": "./lib/mypack.js"
}
Now you can use require('./mypack') and node will load ./mypack/lib/mypack.js.
However if you do not include this package.json file, it may still work. Without the file, node will attempt to load ./mypack/index.js, or if that's not there, ./mypack/index.node.
My understanding is that this could be beneficial if you have split your program into many javascript files but do not want to concatenate them for deployment.
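Tying this back to the original question, a minimal index.js inside the mylibfiles folder could simply re-export both files (a sketch; with this file name no package.json is needed):
// mylibfiles/index.js
// Re-export both modules so require('./lib/mylibfiles') exposes foo and bar.
exports.foo = require('./One.js').foo;
exports.bar = require('./Two.js').bar;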
You can use destructuring assignment to map an array of exported modules from require statements in one line:
const requires = (...modules) => modules.map(module => require(module));
const [fs, path] = requires('fs', 'path');
I was doing something similar to what @freakish suggests in his answer, with a project where I have a list of test scripts that are pulled into a Puppeteer + Jest testing setup. My test files follow the naming convention testname1.js - testnameN.js, and I was able to use a generator function to require N files from the particular directory with the approach below:
const fs = require('fs');
const path = require('path');

module.exports = class FilesInDirectory {
    constructor(directory) {
        this.fid = fs.readdirSync(path.resolve(directory));
        this.requiredFiles = (this.fid.map((fileId) => {
            let resolvedPath = path.resolve(directory, fileId);
            return require(resolvedPath);
        })).filter(file => !!file);
    }

    printRetrievedFiles() {
        console.log(this.requiredFiles);
    }

    nextFileGenerator() {
        const parent = this;
        const fidLength = parent.requiredFiles.length;
        function* iterate(index) {
            while (index < fidLength) {
                yield parent.requiredFiles[index++];
            }
        }
        return iterate(0);
    }
}
Then use like so:
//Use in test
const FilesInDirectory = require('./utilities/getfilesindirectory');
const StepsCollection = new FilesInDirectory('./test-steps');
const StepsGenerator = StepsCollection.nextFileGenerator();
//Assuming we're in an async function
await StepsGenerator.next().value.FUNCTION_REQUIRED_FROM_FILE(someArg);
