Using Gulp to build requireJS project - gulp-requirejs - requirejs

I am trying to use gulp-requirejs to build a demo project. I expect the result to be a single file with all JS dependencies and templates included. Here is my gulpfile.js:
var gulp = require('gulp');
var rjs = require('gulp-requirejs');

var paths = {
    scripts: ['app/**/*.js'],
    images: 'app/img/**/*'
};

gulp.task('requirejsBuild', function() {
    rjs({
        name: 'main',
        baseUrl: './app',
        out: 'result.js'
    })
    .pipe(gulp.dest('app/dist'));
});
// The default task (called when you run `gulp` from cli)
gulp.task('default', ['requirejsBuild']);
The above build file runs with no errors, but result.js only contains the content of main.js and config.js. None of the view files, jQuery, Underscore, or Backbone are included.
How can I configure gulp-requirejs to put every JS module and template into one JS file?
If this is not the right way to go, can you please suggest another method?
Edit
config.js
require.config({
    paths: {
        "almond": "/bower_components/almond/almond",
        "underscore": "/bower_components/lodash/dist/lodash.underscore",
        "jquery": "/bower_components/jquery/dist/jquery",
        "backbone": "/bower_components/backbone/backbone",
        "text": "/bower_components/requirejs-text/text",
        "book": "./model-book"
    }
});
main.js
// Break out the application running from the configuration definition to
// assist with testing.
require(["config"], function() {
    // Kick off the application.
    require(["app", "router"], function(app, Router) {
        // Define your master router on the application namespace and trigger all
        // navigation from this instance.
        app.router = new Router();

        // Trigger the initial route and enable HTML5 History API support, set the
        // root folder to '/' by default. Change in app.js.
        Backbone.history.start({ pushState: false, root: '/' });
    });
});
The output is just a concatenation of these two files, which is not what I expected.

gulp-requirejs has been blacklisted by the gulp folks. They see the RequireJS optimizer as its own build system, incompatible with gulp. I don't know much about that, but I did find an alternative in amd-optimize that worked for me.
npm install amd-optimize --save-dev
Then in your gulpfile:
var amdOptimize = require('amd-optimize');
var concat = require('gulp-concat');
gulp.task('bundle', function () {
    return gulp.src('**/*.js')
        .pipe(amdOptimize('main'))
        .pipe(concat('main-bundle.js'))
        .pipe(gulp.dest('dist'));
});
The output of amdOptimize is a stream which contains the dependencies of the primary module (main in the above example) in an order that resolves correctly when loaded. These files are then concatenated together via concat into a single file main-bundle.js before being written into the dist folder.
You could also minify this file and perform other transformations as needed.
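For example, a minimal sketch of adding minification to the same pipeline (this assumes gulp-uglify is installed; the 'bundle-min' task name is just illustrative):
var uglify = require('gulp-uglify');

gulp.task('bundle-min', function () {
    return gulp.src('**/*.js')
        .pipe(amdOptimize('main'))
        .pipe(concat('main-bundle.js'))
        .pipe(uglify())                 // minify the concatenated bundle
        .pipe(gulp.dest('dist'));
});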
As an aside, in my case I was compiling TypeScript into AMD modules for bundling. Thinking this through further I realized that when bundling everything I don't need the asynchronous loading provided by AMD/RequireJS. I am going to experiment with having TypeScript compile CommonJS modules instead, then bundling them using webpack or browserify, both of which seem to have good support within gulp.

UPDATE
My previous answer always reported taskReady even if requirejs reported an error. I reconsidered this approach and added error logging. I also try to fail the build completely, as described in gulp-jshint: How to fail the build?, because a silent failure really eats your time.
See updated code below.
Drew's comment about the blacklist was very helpful, and the gulp folks suggest using requirejs directly. So here is my direct requirejs solution:
var DIST = './dist';
var requirejs = require('requirejs');
var requirejsConfig = require('./requireConfig.js').RJSConfig;

gulp.task('requirejs', function (taskReady) {
    requirejsConfig.name = 'index';
    requirejsConfig.out = DIST + '/app.js';
    requirejsConfig.optimize = 'uglify';

    requirejs.optimize(requirejsConfig, function () {
        taskReady();
    }, function (error) {
        console.error('requirejs task failed', JSON.stringify(error));
        process.exit(1);
    });
});
The file at ./dist/app.js is built and uglified, and this way gulp knows when requirejs has finished building, so the task can be used as a dependency.
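For example, a minimal sketch of depending on it (the 'build' task and the assets glob are hypothetical): because taskReady() is only called from the optimizer's success callback, any task that lists 'requirejs' as a dependency will wait until app.js has been written.
gulp.task('build', ['requirejs'], function () {
    // copy static assets only after requirejs has produced app.js
    return gulp.src('./assets/**/*')
        .pipe(gulp.dest(DIST));
});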

My solution works like this:
./client/js/main.js:
require.config({
    paths: {
        jquery: "../vendor/jquery/dist/jquery",
        ...
    },
    shim: {
        ...
    }
});

define(["jquery"], function($) {
    console.log($);
});
./gulpfile.js:
var gulp = require('gulp'),
    ....
    amdOptimize = require("amd-optimize"),
    concat = require('gulp-concat'),
    ...

gulp.task('scripts', function(cb) {
    var js = gulp.src(path.scripts + '.js')
        .pipe(cached('scripts'))
        .pipe(jshint())
        .pipe(jshint.reporter('default'))
        .pipe(remember('scripts'))
        .pipe(amdOptimize("main", {
            name: "main",
            configFile: "./client/js/main.js",
            baseUrl: './client/js'
        }))
        .pipe(concat('main.js'))
        .pipe(gulp.dest(path.destScripts));
});
...
This part was important:
configFile: "./client/js/main.js",
baseUrl: './client/js'
This allowed me to keep my configuration in one place. Otherwise I would have had to duplicate my paths and shims in gulpfile.js.

This works for me. It seems that one ought to add uglification etc. via gulp if desired, e.g. .pipe(uglify()) ...
Currently I have to duplicate the config in main.js to run asynchronously.
....
var amdOptimize = require("amd-optimize");
...
var js = gulp.src(path.scripts + '.js')
    .pipe(cached('scripts'))
    .pipe(jshint())
    .pipe(jshint.reporter('default'))
    .pipe(remember('scripts'))
    .pipe(amdOptimize("main", {
        name: "main",
        paths: {
            jquery: "client/vendor/jquery/dist/jquery",
            jqueryColor: "client/vendor/jquery-color/jquery.color",
            bootstrap: "client/vendor/bootstrap/dist/js/bootstrap",
            underscore: "client/vendor/underscore-amd/underscore"
        },
        shim: {
            jqueryColor: {
                deps: ["jquery"]
            },
            bootstrap: {
                deps: ["jquery"]
            },
            app: {
                deps: ["bootstrap", "jqueryColor", "jquery"]
            }
        }
    }))
    .pipe(concat('main.js'));
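To finish the chain, the stream still needs to be written out and returned so gulp can track completion; a sketch, assuming this runs inside a gulp task and that path.destScripts points at the output folder:
return js
    .pipe(gulp.dest(path.destScripts));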

Try this code in your gulpfile:
// Node modules
var fs = require('fs'),
    vm = require('vm'),
    merge = require('deeply');

// Gulp and plugins
var gulp = require('gulp'),
    gulprjs = require('gulp-requirejs-bundler');

// Config
var requireJsRuntimeConfig = vm.runInNewContext(fs.readFileSync('app/config.js') + '; require;'),
    requireJsOptimizerConfig = merge(requireJsRuntimeConfig, {
        name: 'main',
        baseUrl: './app',
        out: 'result.js',
        paths: {
            requireLib: 'bower_modules/requirejs/require'
        },
        insertRequire: ['main'],
        // aliases from config.js - these libs will be included in result.js
        include: [
            'requireLib',
            "almond",
            "underscore",
            "jquery",
            "backbone",
            "text",
            "book"
        ]
    });

gulp.task('requirejsBuild', ['component-scripts', 'external-scripts'], function (cb) {
    return gulprjs(requireJsOptimizerConfig)
        .pipe(gulp.dest('app/dist'));
});

Sorry for my English. This solution works for me. (I used gulp-requirejs at my job.)
I think you've forgotten to set mainConfigFile in your gulpfile.js. So this code should work:
gulp.task('requirejsBuild', function() {
    rjs({
        name: 'main',
        mainConfigFile: 'path_to_config/config.js',
        baseUrl: './app',
        out: 'result.js'
    })
    .pipe(gulp.dest('app/dist'));
});
In addition, I think that when you run that task in gulp, require cannot find its config file.

This is not gulp-requirejs's fault.
The reason why only main.js and config.js end up in the output is that you're not requiring/defining any other files. Without doing so, the RequireJS optimizer won't understand which files to add; the paths in your config file aren't a way to require them!
For example, you could load a main.js file from your config file and define all your files in main (not optimal, but just an example).
At the bottom of your config file:
// Load the main app module to start the app
requirejs(["main"]);
The main.js file (just adding jQuery to show the technique):
define(["jquery"], function($) {});
I might also recommend gulp-requirejs-optimize instead, mainly because it adds the minification/obfuscation functions gulp-requirejs lacks: https://github.com/jlouns/gulp-requirejs-optimize
How to implement it:
var requirejsOptimize = require('gulp-requirejs-optimize');

gulp.task('requirejsoptimize', function () {
    return gulp.src('src/js/require.config.js')
        .pipe(requirejsOptimize(function(file) {
            return {
                baseUrl: "src/js",
                mainConfigFile: 'src/js/require.config.js',
                paths: {
                    requireLib: "vendor/require/require"
                },
                include: "requireLib",
                name: "require.config",
                out: "dist/js/bundle2.js"
            };
        }))
        .pipe(gulp.dest(''));
});

Related

Browserify + browserify-ngannotate + Tsify not working

I'm using gulp with browserify and tsify. This has been working quite well. Then I decided to add ng-annotate using browserify-ngannotate.
I've added the ng-annotate browserify transform, but it seems that if tsify is added as a plugin, the ng-annotate transform is never called.
If I remove the tsify plugin then ng-annotate gets called. I've played around and switched the order of the plugin/transform registration. Am I missing something here, or should I go and log an issue at browserify/tsify?
var browserify = require('browserify');
var browserSyncConfig = require('../config').browserSync;
var browserSync = require('browser-sync').get(browserSyncConfig.instance);
var watchify = require('watchify');
var tsify = require('tsify');
var ngAnnotate = require('browserify-ngannotate');
var mergeStream = require('merge-stream');
var bundleLogger = require('../util/bundleLogger');
var gulp = require('gulp');
var handleErrors = require('../util/handleErrors');
var source = require('vinyl-source-stream');
var config = require('../config').browserify;
var _ = require('lodash');
var browserifyTask = function (devMode) {
    var browserifyThis = function (bundleConfig) {
        if (devMode) {
            // Add watchify args and debug (sourcemaps) option
            _.extend(bundleConfig, watchify.args, {debug: true});
            // A watchify require/external bug that prevents proper recompiling,
            // so (for now) we'll ignore these options during development. Running
            // `gulp browserify` directly will properly require and externalize.
            bundleConfig = _.omit(bundleConfig, ['external', 'require']);
        }

        var b = browserify(bundleConfig);

        if (bundleConfig.tsify) {
            b = b.plugin(tsify, {
                noImplicitAny: false,
                target: 'ES5',
                noExternalResolve: false,
                module: 'commonjs',
                removeComments: false
            });
        }

        if (bundleConfig.ngAnnotate) {
            b = b.transform(ngAnnotate);
        }

        var bundle = function () {
            // Log when bundling starts
            bundleLogger.start(bundleConfig.outputName);

            return b
                .bundle()
                // Report compile errors
                .on('error', handleErrors)
                // Use vinyl-source-stream to make the
                // stream gulp compatible. Specify the
                // desired output filename here.
                .pipe(source(bundleConfig.outputName))
                // Specify the output destination
                .pipe(gulp.dest(bundleConfig.dest))
                .pipe(browserSync.stream());
        };

        if (devMode) {
            // Wrap with watchify and rebundle on changes
            b = watchify(b, {
                poll: true
            });
            // Rebundle on update
            b.on('update', bundle);
            bundleLogger.watch(bundleConfig.outputName);
        } else {
            // Sort out shared dependencies.
            // b.require exposes modules externally
            if (bundleConfig.require) b.require(bundleConfig.require);
            // b.external excludes modules from the bundle, and expects
            // they'll be available externally
            if (bundleConfig.external) b.external(bundleConfig.external);
        }

        return bundle();
    };

    // Start bundling with Browserify for each bundleConfig specified
    return mergeStream.apply(gulp, _.map(config.bundleConfigs, browserifyThis));
};

gulp.task('browserify', function () {
    return browserifyTask();
});

// Exporting the task so we can call it directly in our watch task, with the 'devMode' option
module.exports = browserifyTask;
You can solve it by specifying extensions in the ng-annotate options:
bundler.transform(ngAnnotate, { ext: ['.ts', '.js'] });
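Applied to the gulp task above, the transform registration would then look roughly like this (a sketch):
if (bundleConfig.ngAnnotate) {
    // tell ng-annotate to also process the TypeScript sources handled by tsify
    b = b.transform(ngAnnotate, { ext: ['.ts', '.js'] });
}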
I realized I had this problem too, when I added uglifyify to the bundle transforms to produce minified builds.
An important aspect of my solution is that the missing explicit $inject statements that ng-annotate should have inserted don't matter until the code is actually minified. Luckily, UglifyJS2, which does the actual minification in uglifyify, got support for handling ng-annotate's ngInject comments in version 2.4.9 (January 2014).
So, the solution that worked for me was to install uglifyify:
npm install --save-dev uglifyify
and add the following uglifyify transform to the Browserify bundle:
b.transform({
    global: true,
    mangle: false,
    comments: true,
    compress: {
        angular: true
    }
}, 'uglifyify');
This will make UglifyJS2 insert the appropriate $inject statements into your code before it is minified.
So, to summarize, I did not have a solution for only using ng-annotate, but my solution will add the necessary $inject statements before the code is minified, which is what matters in most cases.

Durandal optimization with Gulp and Gulp-Durandal not working

We are building an application with Durandal which is quite big at the moment, and we are currently looking into bundling all the JS files located in the App folder into a main-built.js file. Pretty basic and usual stuff, I guess.
I'm using Gulp with the Gulp-Durandal extension. Here is our gulpfile:
var gulp = require('gulp');
var durandal = require('gulp-durandal');

gulp.task('build-portal', function () {
    durandal({
        baseDir: 'app',
        main: 'main.js',
        output: 'main-built.js',
        almond: false,
        minify: false
    }).pipe(gulp.dest('app'));
});
And here's a snippet of our main.js file
require.config({
    paths: {
        'text': '../Scripts/text',
        'durandal': '../Scripts/durandal',
        'plugins': '../Scripts/durandal/plugins',
        'transitions': '../Scripts/durandal/transitions'
    },
    shim: {
    },
    waitSeconds: 0
});

define('jquery', [], function () { return jQuery; });
define('knockout', [], function () { return ko; });
define('ga', function () { return ga; });

define(
    ["require", "exports", "durandal/app", "durandal/viewLocator", "durandal/system", "plugins/router", "services/logger", "modules/knockout.extensions", "modules/knockout.validation.custom"],
    function (require, exports, __app__, __viewLocator__, __system__, __router__, __logger__, __koExtensions__, __koValidationCustom__) {
        var app = __app__;
        var viewLocator = __viewLocator__;
        var system = __system__;
        var router = __router__;
As you can see in the gulpfile, we do not want to use Almond but RequireJS instead; for some reason Almond isn't working with our project, and anyhow we prefer RequireJS even though it is bigger than Almond. That's where it seems to break. Running the command to build the main-built.js file took some time, but at the end I got the file built with everything in it.
The problem is that when I try to load the application, it is stuck at the loading screen. It doesn't go any further and there are no errors at all in the browser console.
I created a new project on the side to test whether our code was somehow faulty and found that it might not be. You can find that project here:
https://github.com/maroy1986/DurandalGulpBundling
If I build that project with the almond option set to true, everything works fine, but if I switch almond off to tell gulp to use RequireJS, I get the same behavior as our app: stuck at the loading screen, without any errors.
So here I am. I have read a lot on the subject but haven't found anything to solve this. I hope someone here has already encountered this behavior and has a solution to share.
Thanks!
I had the same requirement and issue. It seems require.js wasn't calling the main module which starts up the Durandal app; that's why it was stuck at the loading screen. I was able to resolve it by explicitly requiring the main module:
gulp.task("durandal", function() {
return durandal({
baseDir: "app",
main: "main.js",
output: "main-built.js",
almond: false,
minify: true,
rjsConfigAdapter: function(config) {
//Tell requirejs to load the "main" module
config.insertRequire = ["main"];
return config;
}
})
.pipe(gulp.dest("dist"));
});
I downloaded your project and tried building it with the latest versions of gulp and durandal. Initially it didn't build and gave me the following error:
TypeError: Cannot read property 'normalize' of undefined
This is a problem with the text-plugin of rjs and you can solve this by adding the following to your gulp-file (next to the almond, minify, output... properties):
rjsConfigAdapter: function(rjsConfig) {
    rjsConfig.deps = ['text'];
    return rjsConfig;
}
Once I did that, the build finished and I could build with or without minify, almond and require, and the application works fine.
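If you need both fixes at once, the two adapters can be combined into a single rjsConfigAdapter (a sketch based on the snippets in this answer):
rjsConfigAdapter: function (config) {
    // force the text plugin to load and require the "main" module at startup
    config.deps = ['text'];
    config.insertRequire = ['main'];
    return config;
}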

Importing Sass through npm

Currently in our Sass files we have something like the following:
#import "../../node_modules/some-module/sass/app";
This is bad, because we're not actually sure of the path: it could be ../node_modules, it could be ../../../../../node_modules, because of how npm installs stuff.
Is there a way in Sass that we can search up until we find node_modules? Or even a proper way of including Sass through npm?
If you are looking for a handy answer in 2017 and are using Webpack, this was the easiest I found.
Suppose your module path is like:
node_modules/some-module/sass/app
Then in your main scss file you can use:
#import "~some-module/sass/app";
Tilde operator shall resolve any import as a module.
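For context, a minimal sketch of the Webpack side (assuming style-loader, css-loader and sass-loader are installed; the exact loader chain is an assumption, not part of the original answer):
// webpack.config.js
module.exports = {
    module: {
        rules: [
            // sass-loader resolves the ~ prefix against node_modules
            { test: /\.scss$/, use: ['style-loader', 'css-loader', 'sass-loader'] }
        ]
    }
};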
As Oncle Tom mentioned, the new version of Sass has a new importer option, where every import you do in your Sass file will first go through this method. That means you can modify the actual URL the import resolves to.
I've used require.resolve to locate the actual module entry file.
Have a look at my gulp task and see if it helps you:
'use strict';

var path = require('path'),
    gulp = require('gulp'),
    sass = require('gulp-sass');

var aliases = {};

/**
 * Will look for .scss|sass files inside the node_modules folder
 */
function npmModule(url, file, done) {
    // check if the path was already found and cached
    if (aliases[url]) {
        return done({ file: aliases[url] });
    }

    // look for modules installed through npm
    try {
        var newPath = path.relative('./css', require.resolve(url));
        aliases[url] = newPath; // cache this request
        return done({ file: newPath });
    } catch(e) {
        // if your module could not be found, just return the original url
        aliases[url] = url;
        return done({ file: url });
    }
}

gulp.task("style", function() {
    return gulp.src('./css/app.scss')
        .pipe(sass({ importer: npmModule }))
        .pipe(gulp.dest('./css'));
});
Now let's say you installed inuit-normalize using npm. You can simply "require" it in your Sass file:
@import "inuit-normalize";
I hope that helps you and others. Because adding relative paths is always a pain in the ass :)
You can add node_modules as another entry in the includePaths of your render options.
Plain example
Snippet based on example from Oncle Tom.
var options = {
    file: './sample.scss',
    includePaths: [
        path.join(__dirname, 'bower_components'), // bower
        path.join(__dirname, 'node_modules')      // npm
    ]
};

sass.render(options, function(err, result) {
    console.log(result.css.toString());
});
That should do it. You can then include files from a package using @import "my-cool-package/super-grid"
Webpack and scss-loader example
{
    test: /\.scss$/,
    loader: 'style!css!autoprefixer?browsers=last 2 version!sass?outputStyle=expanded&sourceMap=true&sourceMapContents=true&includePaths[]=./node_modules'
},
Notice the last argument: includePaths has to be an array. Keep in mind to use the right format.
You can use a Sass importer function to do so. Cf. https://github.com/sass/node-sass#importer--v200.
The following example illustrates node-sass#3.0.0 with node#0.12.2:
Install the bower dependency:
$ bower install sass-mq
$ npm install sass/node-sass#3.0.0-pre
The Sass file:
@import 'sass-mq/mq';

body {
    @include mq($from: mobile) {
        color: red;
    }
    @include mq($until: tablet) {
        color: blue;
    }
}
The node renderer file:
'use strict';

var sass = require('node-sass');
var path = require('path');
var fs = require('fs');

var options = {
    file: './sample.scss',
    importer: function bowerModule(url, file, done) {
        var bowerComponent = url.split(path.sep)[0];
        if (bowerComponent !== url) {
            fs.access(path.join(__dirname, 'bower_components', bowerComponent), fs.R_OK, function(err) {
                if (err) {
                    return done({ file: url });
                }
                var newUrl = path.join(__dirname, 'bower_components', url);
                done({ file: newUrl });
            });
        }
        else {
            done({ file: url });
        }
    }
};

sass.render(options, function(err, result) {
    if (err) {
        console.error(err);
        return;
    }
    console.log(result.css.toString());
});
This one is simple and not recursive. The require.resolve function could help to deal with the tree – or wait until npm#3.0.0 to benefit from the flat dependency tree.
I made the sass-npm module specifically for this.
npm install sass-npm
In your SASS:
// Since node_modules/npm-module-name/style.scss exists, this will be imported.
@import "npm-module-name";

// Since just-a-sass-file isn't an installed npm module, it will be imported as a regular SCSS file.
@import "just-a-sass-file";
I normally use gulp-sass (which has the same 'importer' option as regular SASS)
var gulp = require('gulp'),
    sass = require('gulp-sass'),
    sassNpm = require('sass-npm')();
Then, in your .pipe(sass()), add the importer as an option:
.pipe(sass({
    paths: ['public/scss'],
    importer: sassNpm.importer,
}))
For dart-sass and command-line users in 2022, just use the --load-path option:
$ npx sass --load-path=node_modules
Important: the whole node_modules folder contains so much that pointing at it makes watch mode launch extremely slowly. You should only add the paths of the packages you need, e.g.:
$ npx sass -w --load-path=node_modules/foo --load-path=node_modules/bar/scss
According to the official Sass documentation, adding ~ to imports should do the job.
However, for some reason it didn't work for me, and the Sass compiler still complained that the module could not be found.
Hence, I tried another method which worked for me without any issues. Here's the solution:
If you are compiling Sass files directly from the CLI, try this:
sass src/main.scss dist/main.css --load-path=node_modules
If you are using npm and/or webpack for compiling sass files, add something like this to the scripts of package.json:
"scripts": {
...
"build": "sass src/main.scss dist/main.css --load-path=node_modules",
...
}
Then Run:
npm run build
Finally, import your modules like this:
#import "some-module/sass/app";
To wrap it up, adding the --load-path=node_modules flag solved the issue permanently. For more information you can check:
sass --help

How to use yeoman, grunt, usemin 2 and requirejs?

I'm trying to wrap my head around using Grunt, usemin 2, requirejs, and uglify. What I'm observing in my builds is that requirejs is not properly including my dependencies into my single concatenated build file. When I run index.html out of /dist, I see errors when looking for 'jquery', 'app', and some third party js files or sometimes "define is not defined".
I read the following issues on grunt-usemin about removing the require blocks, but some questions still remain in those threads.
Recommended way to handle RequireJS, concat, uglify
How to handle requirejs in v2.0
I followed up my search and came across the post How to integrate Yeoman and Requirejs, which sort of got me there, in that I saw the RequireJS optimizer running when I changed from grunt-contrib-requirejs to grunt-requirejs. Unfortunately, I still see these errors:
Uncaught ReferenceError: define is not defined.
I have the following in my index.html:
<!-- build:js js/main.js -->
<script src="bower_components/requirejs/require.js"></script>
<script src="js/main.js"></script>
<!-- endbuild -->
Here is my Grunt file: http://jsbin.com/futocusu/3/edit?js
There was talk in issue #112 about creating an article on using Yeoman on this topic, but I don't think it's been written yet.
Has anyone figured out the best way to use usemin v2 with grunt and requirejs to output a single concat+uglify file on build? I'm also not sure what the difference is between grunt-contrib-requirejs and grunt-requirejs and when to use which one.
It looks as though you are trying to do too much with main.js.
I have the following build tasks in Gruntfile.js
grunt.registerTask('build', [
    'copy',             // copies the src directory to dist (htdocs)
    'requirejs',        // do an r.js build to concat modular dependencies
    'concat:head',      // concats js in the head
    'uglify:head',      // compresses js in the head
    'uglify:foot',      // compresses js in the foot
    'cssmin',           // minifies and concats css
    'usemin',           // runs through html and inputs minified js and css
    'clean:afterBuild'  // deletes files that are not required for build
]);
Here are each of the relevant Grunt tasks (for me these are stored in separate files because I use load-grunt-config). If you would like to use these in your Gruntfile, all you need to do is grab everything in the returned object and put it under the matching task key in your Gruntfile (see the sketch after the task listings below):
copy
module.exports = function (grunt, options) {
    return {
        main: {
            cwd: 'src/',
            src: '**',
            dest: 'dist/',
            expand: true,
            flatten: false
        },
    };
};
requirejs
module.exports = function(grunt, options) {
    return {
        compile: {
            options: {
                appDir: "src/to/require/app",
                baseUrl: "./",
                mainConfigFile: "src/to/require/app/common",
                dir: "dist/to/require/app",
                // build a common layer
                modules: [
                    {
                        "name": "common"
                    }
                ]
            }
        }
    };
};
concat
module.exports = function (grunt, options) {
    return {
        head: {
            /* other stuff */
        },
        foot: {
            src: [
                'dist/to/require/app/some_other_js.js',
                'dist/to/require/app/external/require.js',
                'dist/to/require/app/external/require.text.js',
                'dist/to/require/app/common.js'
            ],
            dest: 'src/to/require/app/compiled_footer_js.js',
        }
    };
};
uglify
module.exports = function (grunt, options) {
    return {
        head: {
            /* other stuff */
        },
        foot: {
            files: {
                'src/to/require/app/compiled_footer_js.min.js': ['src/to/require/app/compiled_footer_js.js']
            }
        }
    };
};
usemin
module.exports = function (grunt, options) {
    return {
        html: [
            'src/path/to/index.html'
        ]
    };
};
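As mentioned above, if you are not using load-grunt-config, each returned object maps onto the matching key of grunt.initConfig. A minimal sketch with just the copy task (the other tasks would slot in alongside it; grunt-contrib-copy is assumed to be installed):
module.exports = function (grunt) {
    grunt.initConfig({
        copy: {
            main: { cwd: 'src/', src: '**', dest: 'dist/', expand: true, flatten: false }
        }
        // requirejs: { ... }, concat: { ... }, uglify: { ... }, cssmin: { ... }, usemin: { ... }
    });

    grunt.loadNpmTasks('grunt-contrib-copy');
    grunt.registerTask('build', ['copy' /*, ... */]);
};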

How to get a single javascript page using r.js

This is my first try using RequireJS and it works great!
I now would like to use the optimizer, and I am running into some issues when running my code in the browser.
I have these JS files:
/public/javascripts/build.js
/public/javascripts/main.js
/public/javascripts/lib/jquery.min.js
/public/javascripts/lib/require.min.js
/public/javascripts/a.js
/public/javascripts/b.js
/public/javascripts/c.js
a.js, b.js and c.js are modules I define for my application using RequireJS.
main.js:
require.config({
    paths: {
        'jQuery': 'lib/jquery.min.js'
    },
    shim: {
        'jQuery': {
            exports: '$'
        }
    }
});

require(['a.js'], function(A){
    var Entity = new A();
});
build.js
({
    baseUrl: ".",
    paths: {
        requireLib: "lib/require.min",
        jquery: "lib/jquery.min"
    },
    name: "main",
    out: "main-built.js",
    include: ["requireLib"]
})
Also, I am wondering why we have to specify the paths of the libraries in build.js and not the other JavaScript files.
When I do not use the optimizer and only load the file
<script src="/javascripts/lib/require.min.js" data-main="/javascripts/main"></script>
it works great, but when I run r.js -o ./public/javascripts/build.js and only load
<script src="/javascripts/main-built.js"></script> I get the error Uncaught TypeError: undefined is not a function in the minified code.
How can that be explained?
Here are the logs I get when running r.js:
Tracing dependencies for: main
Uglifying file: /public/javascripts/main-built.js
/public/javascripts/main-built.js
----------------
/public/javascripts/lib/require.min.js
/public/javascripts/a.js
/public/javascripts/b.js
/public/javascripts/lib/jquery.min.js
/public/javascripts/c.js
/public/javascripts/main.js
This is definitely wrong:
require(['a.js'], function(A){
    var Entity = new A();
});
You should not use extensions in the list of dependencies you give to require or define. Modules should be named without an extension, so here 'a', not 'a.js'. Using 'a.js' will cause RequireJS to fail to load what you really want once the optimizer has run. Let's say you have a file named a.js which contains:
define(function () {
    return function () {};
});
The optimizer will include it into your main-built.js file like this:
define("a", function () {
return function () {};
});
Note how the first parameter to define is now "a". This has been added by r.js. This is the name of the module. When you load main-built.js, a module named "a" is defined. When you use require with "a.js", you are telling RequireJS you want something in a file named a.js so RequireJS will go looking for that and ignore what is in main-built.js.
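So the fix in main.js is to reference the module by name rather than by file name, for example:
require(['a'], function (A) {
    var Entity = new A();
});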
Also, jQuery 1.8 or over does not need a shim.
I have just added
shim: {
    'jQuery': {
        exports: '$'
    }
}
into the build.js file, and it works perfectly!
Thanks!
