I have problems debugging a containerized application using WebStorm's (or IntelliJ's) remote-debugging option, which works perfectly when I remote-debug the application running locally.
Here are the relevant parts of my gulp config file:
// Transpile the ES sources under src/ into the build dir, emitting external
// source maps. The maps use sourceRoot "../src" so a debugger can resolve
// the original files relative to the generated output.
gulp.task("mainCompile", function () {
  return gulp.src(`${config.src.main.baseDir}/**/*.js`, { base: "./src" })
    .pipe(plugins.sourcemaps.init())
    .pipe(plugins.babel())
    .pipe(plugins.sourcemaps.write(".", {
      includeContent: false,
      sourceRoot: "../src"
    }))
    .pipe(gulp.dest(config.build.baseDir));
});
// Run the built app under nodemon once resources are copied and sources
// compiled; on changes under the source dir, re-run the listed tasks and
// restart the process.
gulp.task("nodemon", ["copyResources", "compile"], () =>
plugins.nodemon({
execMap: {
// Legacy V8 debug protocol, paused on the first line (port 5858 by default).
// NOTE(review): --debug-brk is the deprecated protocol; newer Node uses
// --inspect-brk, and inside a container the debugger may need to bind
// 0.0.0.0 to be reachable from the host — confirm against the Node version
// in the image.
js: "node --debug-brk"
},
script: path.join(config.build.mainDir, "index.js"),
// Watch both JS and YAML sources.
ext: "js yaml",
// Debounce restarts by 2.5s.
delay: 2500,
debug: true,
verbose: true,
watch: config.src.main.baseDir,
// Tasks nodemon re-runs before restarting the script.
tasks: ["copyNonJs", "yamlToJson", "compile"]
})
);
And the part of compose config. As you can see I mount the project root at /app in the container.
# Development service for the app. The project root is bind-mounted at /app
# so source changes on the host are visible inside the container.
server:
image: justmarried/jmserver:development-latest
build:
context: .
dockerfile: containers/develop/Dockerfile
ports:
# 2701 = application port, 5858 = legacy Node debug-protocol port.
- 2701:2701
- 5858:5858
volumes:
- ./:/app
The remote debugger attaches, but it stops only at breakpoints defined in index.js (or at breakpoints placed inside the "build" dir, which is not what I want). When the app is fully loaded I get this:
As you can see index.js gets mapped perfectly but none of the other files do. Tried to inline code into sourcemaps but it didn't help. Is it a bug in WebStorm (newest, 2017.1.3)?
One more observation: when I do "show actual source" on any file in the build dir in the Scripts tab of the Debug tool window, I get something like this:
(function (exports, require, module, __filename, __dirname) { "use strict";
var _httpStatus = require("http-status");
var _httpStatus2 = _interopRequireDefault(_httpStatus);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
module.exports = {
get: function get(req, res) {
res.status(_httpStatus2.default.OK).json({
text: "Hi there!"
});
}
};
//# sourceMappingURL=ping.js.map
});
But when I do "show source" I get
"use strict";
var _httpStatus = require("http-status");
var _httpStatus2 = _interopRequireDefault(_httpStatus);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
module.exports = {
get: function get(req, res) {
res.status(_httpStatus2.default.OK).json({
text: "Hi there!"
});
}
};
//# sourceMappingURL=ping.js.map
Of course the actual source of that file resides in the src/main dir not build/main dir and is:
import HttpStatus from "http-status";
module.exports = {
get(req, res) {
res.status(HttpStatus.OK).json({
text: "Hi there!"
});
}
};
The contents of that ping.js.map is:
{"version":3,"sources":["main/handlers/ping.js"],"names":["module","exports","get","req","res","status","OK","json","text"],"mappings":";;AAAA;;;;;;AAEAA,OAAOC,OAAP,GAAiB;AACfC,KADe,eACXC,GADW,EACNC,GADM,EACD;AACZA,QAAIC,MAAJ,CAAW,qBAAWC,EAAtB,EAA0BC,IAA1B,CAA+B;AAC7BC,YAAM;AADuB,KAA/B;AAGD;AALc,CAAjB","file":"ping.js","sourceRoot":"../../../src"}
Related
I created a package with several choices; depending on the choice, I run a bash script (.sh) with execFile from Node. It works fine locally with the path "./utils/myScript.sh",
but when I create the package with npm pack and install it globally, it cannot find the script files in the utils directory.
On the other hand, if I use the absolute path /usr/local/lib/node_modules/choicescript/utils/myScript.sh,
it works.
How can I tell it to use a relative path rather than an absolute one?
I also have another minor problem: when the script launches I see my own log output, but I do not see the script's own output, unlike when I run it directly with ./myScript.sh.
const { execFile } = require("child_process");
const { join } = require("path");
const program = require("commander");
const chalk = require("chalk");
const inquirer = require("inquirer");
// Options for execFile: run the script through bash explicitly.
let opts = {
shell: "/bin/bash",
};
function devFunc() {
execFile("./utils/myScrypt.sh", opts, (error, stdout, stderr) => {
log(stdout);
log(stderr);
if (error !== null) {
log(`exec error: ${error}`);
}
});
log(chalk.green.bold("\033[32;5mDATABASE DEV RUN - host: 127.0.0.1 - port: 3306\033[0m - ") + chalk.blue.bold(`CTRL+C pour quitter`));
}
// Map each environment choice to a handler. The lookups are lazy (wrapped
// in arrows) so a handler is only resolved when it is actually invoked,
// exactly as the original if/else chain behaved.
const run = {
  dev: () => devFunc(),
  preprod: () => preprodFunc(),
  prod: () => prodFunc(),
  exit: () => log(chalk.blue("EXIT")),
};

const { dev, preprod, prod } = program.opts();

if (dev) {
  run.dev();
} else if (preprod) {
  run.preprod();
} else if (prod) {
  run.prod();
} else {
  // No flag supplied: fall back to an interactive prompt.
  inquirer
    .prompt([
      {
        type: "rawlist",
        name: "Database_gcp",
        message: "Database choice",
        choices: ["dev", "preprod", "prod", "exit"],
      },
    ])
    .then((gcp) => {
      const handler = run[gcp.Database_gcp];
      if (handler) {
        handler();
      }
    });
}
Instead of doing a relative import/require - you can use the __dirname constant and use "join" for better compatibility. The __dirname yields the full path directory of the current module.
//...
const { join } = require('path');
//...
execFile(join(__dirname, 'utils', 'myScrypt.sh'), opts, (error, stdout, // ...
Gulp should copy all sub-folders and files to dist, but it copies only half of them; some folders are ignored even if I change their names, etc. (no special characters, same subfolder structure as the ones that were copied correctly...) — nothing worked. I just can't see any pattern in this.
There is no error message while running gulp. Nothing that would help me find the error.
This is my gulpfile.babel.js :
// Unified on ES-module syntax: this is a gulpfile.babel.js, so Babel
// transpiles `import`; mixing `import` and `require` in the same file was
// inconsistent.
import path from 'path';
import del from 'del';
import gulp from 'gulp';
import babel from 'gulp-babel';
import concat from 'gulp-concat';
import nodemon from 'gulp-nodemon';
import sourcemaps from 'gulp-sourcemaps';
// Glob groups consumed by the build tasks below.
const paths = {
// Every project .js file except build output, dependencies, coverage
// output and top-level test files.
js: ['./**/*.js', '!dist/**', '!node_modules/**', '!coverage/**', '!./*.test.js'],
// Top-level static files. NOTE(review): templates are also listed in the
// dedicated `templates` entry below — the overlap looks unintentional, and
// `**/**/*` is equivalent to a plain `**/*` — confirm.
statics: ['./package.json', './.gitignore', './.env', './templates/**/**/*'],
resources: ['./resources/**/*'],
public: ['./public/**/*'],
templates: ['./templates/**/**/**/*'],
tests: './server/tests/*.js',
}
// Wipe previous build artefacts and coverage output. The negated globs keep
// the top-level `dist` and `coverage` directories themselves in place.
gulp.task('clean', (done) => {
  const doomed = ['dist/**', 'dist/.*', 'coverage/**', '!dist', '!coverage'];
  del.sync(doomed);
  done();
});
// Copy static assets into dist/, preserving the directory layout.
// BUG FIX: the original created four independent streams and called done()
// immediately, so gulp considered the task complete while files were still
// being written — which is why some folders "randomly" never reached dist.
// A single returned stream (with base '.') lets gulp wait for completion
// and reproduces the same dist/ layout.
gulp.task('copy', function () {
  return gulp.src(
    [...paths.statics, ...paths.resources, ...paths.public, ...paths.templates],
    { base: '.' }
  ).pipe(gulp.dest('dist'));
});
// Transpile all project JS into dist/, preserving directory structure.
// BUG FIX: return the stream (instead of calling done() synchronously) so
// gulp waits until Babel has finished writing every file.
// BUG FIX: the preset name is '@babel/env' — '#babel/env' is not a
// resolvable module name (likely a markdown-mangled '@').
gulp.task('build', function () {
  return gulp.src([...paths.js, '!gulpfile.babel.js'], { base: '.' })
    .pipe(babel({
      presets: ['@babel/env']
    }))
    .pipe(gulp.dest('dist'));
});
// Launch the built app under nodemon, re-running the build pipeline when a
// source .js file changes (build output and node_modules are ignored).
gulp.task('start', function (done) {
  const watcherConfig = {
    script: path.join('dist', 'index.js'),
    delay: 1000,
    ext: 'js',
    tasks: ['clean', 'copy', 'build'],
    ignore: ['node_modules/**/*.js', 'dist/**/*.js'],
  };
  nodemon(watcherConfig);
  done();
});
// 'serve' runs the full pipeline in order; 'default' aliases it so a bare
// `gulp` invocation builds and starts the app.
gulp.task('serve', gulp.series('copy', 'build', 'start'))
gulp.task('default', gulp.series('serve'))
A lot of folders — for example the folder dashboard in routes — do not get copied to the dist folder.
This is my structure:
- backend
- > server
- > server > routes
- ...
- > server > routes > dashboard
- > server > routes > dashboard > *.js
- ...
- gulpfile.babel.js
Your copy task is creating multiple streams with multiple calls to gulp.src. The correct way to combine them is to use merge-stream:
const mergeStream = require('merge-stream');
// Combine the four copy streams into one merged stream so gulp resolves
// the task only after every destination write has finished.
gulp.task('copy', function () {
  const streams = [
    gulp.src(paths.statics).pipe(gulp.dest('dist')),
    gulp.src(paths.resources).pipe(gulp.dest('dist/resources')),
    gulp.src(paths.public).pipe(gulp.dest('dist/public')),
    gulp.src(paths.templates).pipe(gulp.dest('dist/templates')),
  ];
  return mergeStream(...streams);
});
Alternatively, since you're simply copying your folder structure in the dist folder, you could also do this:
// Single-stream variant: one glob list with base '.' reproduces the same
// directory layout under dist/ without needing merge-stream.
gulp.task('copy', function () {
  const everything = [
    ...paths.statics,
    ...paths.resources,
    ...paths.public,
    ...paths.templates,
  ];
  return gulp.src(everything, { base: '.' }).pipe(gulp.dest('dist'));
});
I currently have 2 separate webpack builds for server rendered vs client rendered code. Is there an easy way to change the build output based on server/client build?
For example something like this:
// Have some code like this
if(is_client){
console.log('x.y.z')
} else {
server.log('x.y.z')
}
// Webpack outputs:
// replaced code in client.js
console.log('x.y.z')
// replaced code in server.js
server.log('x.y.z')
Have you tried anything like this?
// webpack.config.js
module.exports = () => ['web', 'node'].map(target => {
const config = {
target,
context: path.resolve('__dirname', 'src'),
entry: {
[target]: ['./application.js'],
},
output: {
path: path.resolve(__dirname, 'dist', target),
filename: '[name].js'
},
modules: { rules: ... },
plugins: [
new webpack.DefinePlugin({
IS_NODE: JSON.stringify(target === 'node'),
IS_WEB: JSON.stringify(target === 'web'),
}),
],
};
return config;
});
// later in your code
import logger from 'logger';
// IS_NODE / IS_WEB are replaced at build time by DefinePlugin, so each
// bundle sees a constant true/false here.
if (IS_NODE) {
logger.log('this is node js');
}
if (IS_WEB) {
console.log('this is web');
}
How does the compilation work?
// client.bundle.js
import logger from 'logger';
// DefinePlugin creates a constant expression which causes the code below to be unreachable
if (false) {
logger.log('this is node js');
}
if (true) {
console.log('this is web');
}
Finally, you will produce your build in production mode, so webpack will run the UglifyJS plugin; it performs dead-code elimination (closely related to tree shaking) and deletes any unused or unreachable code.
and the final result will look like:
// node.bundle.js
import logger from 'logger';
logger.log('this is node js');
// web.bundle.js
console.log('this is web');
I am using requirejs and gulp to build angular app. I am using amd-optimize and gulp-requirejs-optimize to add all js files into single file. Here is my main.js file:
// RequireJS bootstrap: path aliases for app modules and bower packages,
// plus shims for the non-AMD libraries (Angular exports a global instead
// of defining a module, so it is shimmed with `exports`).
require.config({
  paths: {
    app: 'app',
    angular: '../bower_components/angular/angular',
    jquery: '../bower_components/jquery/dist/jquery',
    angularResource: '../bower_components/angular-resource/angular-resource',
    angularRoute: '../bower_components/angular-route/angular-route',
    publicModule: 'public_module',
    route: 'route'
  },
  shim: {
    app: { deps: ['angular'] },
    angularRoute: ['angular'],
    angular: { exports: 'angular' }
  }
});
And gulpfile.js
var gulp = require('gulp');
var rjs = require('gulp-requirejs');
var connect = require('gulp-connect');
var requirejsOptimize = require('gulp-requirejs-optimize');
var amdOptimize = require('amd-optimize');
var concat = require('gulp-concat');
// using amd-optimize.
// Trace the AMD dependency graph starting at 'main' and concatenate the
// resolved modules into a single dist/main-bundle.js.
gulp.task('bundle', function () {
  const traced = gulp.src('app/**/*.js').pipe(amdOptimize('main'));
  return traced
    .pipe(concat('main-bundle.js'))
    .pipe(gulp.dest('dist'));
});
// using gulp-requirejs-optimize.
// Run the r.js optimizer on the entry module and write the result to dist/.
gulp.task('scripts', () =>
  gulp.src('app/main.js')
    .pipe(requirejsOptimize())
    .pipe(gulp.dest('dist'))
);
When I run gulp bundle or gulp scripts, it shows me same content of main.js file in output file(not showing all js template in one output file).
The output file is:
require.config({
paths: {
angular: '../bower_components/angular/angular',
jquery: '../bower_components/jquery/dist/jquery',
angularResource: '../bower_components/angular-resource/angular-resource',
angularRoute: '../bower_components/angular-route/angular-route',
publicModule: 'public_module',
route: 'route'
},
shim: {
'app': { deps: ['angular'] },
'angularRoute': ['angular'],
angular: { exports: 'angular' }
}
});
define('main', [], function () {
return;
});
How can I configure gulp to put every js template into one js file?
Check the docs for all of the options for amd-optimize. For example, you can point it at your config file or add paths.
I always have trouble getting all the paths to line up, so make sure to check them diligently.
here is how you can start to put the options in:
// Bundle the app with amd-optimize, resolving module paths from the
// RequireJS config file so aliased modules are found and inlined.
gulp.task('requirejsBuild', function() {
  // BUG FIX: the stream must be returned — otherwise gulp treats the task
  // as finished immediately and cannot report completion or errors.
  return gulp.src('app/**/*.js', { base: 'app' })
    .pipe(amdOptimize("app", {
      // NOTE(review): `config.app` is not defined in the snippet — confirm
      // it points at the app base URL.
      baseUrl: config.app,
      configFile: 'app/app-config.js',
      findNestedDependencies: true,
    }))
    .pipe(concat('app.js'))
    .pipe(gulp.dest('dist'));
});
You are not requiring any files - you just define an empty module named main.
You need to kick off you app by requiring a module, eg.
// Kick off the app by requiring a module; defining an empty 'main' module
// alone gives the optimizer nothing to trace.
require(['app'], function (App) {
new App().init();
});
I have a problem including mocha tests in a durandal environment. I want to run my tests with mocha-phantomjs from the command line. The test works perfectly in the browser but as soon as I'm trying to use mocha-phantomjs I'm getting the following error message (command: mocha-phantomjs dummyPage.htm):
Error: Your custom reporter tried to require 'fs', but Mocha is not running in N
ode.js in mocha-phantomjs, so Node modules cannot be required - only other repor
ters
My dummy HTML page looks like this:
<script type="text/javascript">
    // Path aliases for the app, the specs, and the Durandal/lib modules.
    require.config({
        baseUrl: '../app/',
        paths: {
            'app': '../app',
            'specs': '../sampleTest/specs/',
            'text': '../lib/require/text',
            'durandal': '../lib/durandal/js',
            'plugins' : '../lib/durandal/js/plugins',
            'transitions' : '/lib/durandal/js/transitions',
            'knockout': '../lib/knockout/knockout-2.3.0',
            'jquery': '../lib/jquery/jquery-1.9.1'
        }
    });
    // Load the given spec modules (if any), then start mocha — via the
    // mocha-phantomjs runner when it is present, plain mocha otherwise.
    var runTests = function (specfiles) {
        // BUG FIX: runTests() is invoked below with no argument, and
        // require(undefined, ...) throws. Default to an empty list; the
        // hard-coded spec below is still loaded either way.
        specfiles = specfiles || [];
        // Initialize mocha and leak assert into the global space.
        mocha.setup('bdd');
        // NOTE(review): mocha-phantomjs supplies its own reporter from the
        // command line; forcing 'html' here may be what triggers the
        // "custom reporter tried to require 'fs'" error — confirm.
        mocha.reporter('html');
        assert = chai.assert;
        require(specfiles, function () {
            require(['specs/test.spec'], function (spec) {
                if (window.mochaPhantomJS) {
                    console.log('test with phantomJS')
                    mochaPhantomJS.run();
                }
                else {
                    mocha.run();
                }
            });
        });
    };
    runTests();
</script>
and my sample test looks like this:
// AMD test module: verifies the flickr viewmodel, plus a small sanity
// check that DOM manipulation works in the test page.
define(['viewmodels/flickr'], function (flickr) {
describe('Flickr-Test', function(){
it('displayName should be equal to Flickr', function () {
assert.equal(flickr.displayName,'Flickr','should load right view model');
});
it('state should be pending', function () {
// activate() returns a promise-like object; its state() reads 'pending'
// until the activation resolves.
assert.equal(flickr.activate().state(),'pending','state should be pending');
});
});
describe("DOM Test", function () {
// Append a known element so the assertion below can read it back.
var el = document.createElement("div");
el.id = "myDiv";
el.innerHTML = "Hello World!";
document.body.appendChild(el);
var myEl = document.getElementById('myDiv');
it("has the right text", function () {
assert.equal(myEl.innerHTML,'Hello World!')
});
});
})
I haven't worked with mocha-phantomjs, but you might be hit by an issue that is described in http://durandaljs.com/documentation/Native-Apps-With-Node-Webkit.html
Next, because node has its own implementation of require that is
different from require.js, which Durandal uses, we need to patch
things up a bit. To do that, add the following script block to the
head of your index.html:
<script type="text/javascript">
// Node-webkit ships its own CommonJS `require`, which collides with
// RequireJS (used by Durandal). Stash Node's require under another name
// and delete the global so RequireJS can take over.
// this doesn't apply when not running with node webkit (nw)
// window.gui = require('nw.gui');
window.requireNode = window.require;
delete window.require;
// Record the Node version, then hide process.versions.node so libraries
// that sniff for Node do not mistake this for a plain Node environment.
window.requireNode.version = process.versions.node;
delete process.versions.node;
</script>