Babel error: Unknown option: .config when trying to upgrade dependencies of Inferno app - node.js

I have an app created with Create Inferno App. I ejected at some point to customize the build configs. Now I am trying to upgrade dependencies to the latest versions and replace deprecated ones. When trying to build my app, I get this error:
yarn run v1.22.19
$ INLINE_RUNTIME_CHUNK=false node scripts/devbuild.js
Creating a development build...
Failed to compile.
Error: [BABEL] /home/farooqkz/playground/chooj/src/index.js: Unknown option: .config. Check out https://babeljs.io/docs/en/babel-core/#options for more information about options.
error Command failed with exit code 1.
info Visit https://yarnpkg.com/en/docs/cli/run for documentation about this command.
Unfortunately, the error message doesn't say where it comes from. Here's my package.json:
{
"name": "chooj",
"version": "0.0.0",
"private": true,
"dependencies": {
"#svgr/webpack": "^6.5.1",
"KaiUI": "git+https://github.com/farooqkz/KaiUIv2.git",
"#babel/core": "^7.20.2",
"#babel/eslint-parser": "^7.19.0",
"babel-loader": "8.0.4",
"babel-preset-inferno-app": "^8.0.3",
"bfj": "^7.0.0",
"case-sensitive-paths-webpack-plugin": "2.4.0",
"chalk": "2.4.1",
"classnames": "^2.3.1",
"core-js": "^3.26.1",
"css-loader": "^6.7.0",
"dotenv": "16.0.3",
"dotenv-expand": "9.0.0",
"eslint": "^8.27.0",
"eslint-config-inferno-app": "^7.0.2",
"eslint-webpack-plugin": "3.2.0",
"eslint-plugin-flowtype": "^8.0.0",
"eslint-plugin-import": "2.14.0",
"eslint-plugin-inferno": "^7.11.0",
"eslint-plugin-jsx-a11y": "6.1.2",
"file-loader": "^6.2.0",
"fs-extra": "7.0.1",
"html-webpack-plugin": "5.5.0",
"identity-obj-proxy": "3.0.0",
"inferno": "^8.0.0",
"inferno-dev-utils": "^6.0.4",
"inferno-extras": "^8.0.0",
"jsqr": "^1.4.0",
"localforage": "^1.9.0",
"matrix-js-sdk": "^21.1.0",
"mini-css-extract-plugin": "^2.6.0",
"css-minimizer-webpack-plugin": "4.2.2",
"pnp-webpack-plugin": "1.7.0",
"postcss-flexbugs-fixes": "4.1.0",
"postcss-loader": "^7.0.0",
"postcss-preset-env": "^7.8.0",
"postcss-safe-parser": "^6.0.0",
"prettier": "^2.3.2",
"querystring-browser": "^1.0.4",
"react-app-polyfill": "^3.0.0",
"resolve": "^1.22.0",
"sass-loader": "^13.0.0",
"style-loader": "^3.3.0",
"terser-webpack-plugin": "5.3.6",
"url-loader": "1.1.2",
"webpack": "^5.75.0",
"webpack-dev-server": "^4.11.1"
},
"scripts": {
"start": "node scripts/start.js",
"devbuild": "INLINE_RUNTIME_CHUNK=false node scripts/devbuild.js"
},
"eslintConfig": {
"extends": "inferno-app"
},
"browserslist": "ff 48",
"babel": {
"presets": [
"inferno-app"
]
},
"devDependencies": {
"babel-plugin-transform-replace-expressions": "^0.2.0"
}
}
And here's the webpack config I use:
'use strict';
const path = require('path');
const webpack = require('webpack');
const PnpWebpackPlugin = require('pnp-webpack-plugin');
const HtmlWebpackPlugin = require('html-webpack-plugin');
const CaseSensitivePathsPlugin = require('case-sensitive-paths-webpack-plugin');
const ESLintPlugin = require('eslint-webpack-plugin');
const InterpolateHtmlPlugin = require('inferno-dev-utils/InterpolateHtmlPlugin');
const WatchMissingNodeModulesPlugin = require('inferno-dev-utils/WatchMissingNodeModulesPlugin');
const ModuleScopePlugin = require('inferno-dev-utils/ModuleScopePlugin');
const getCSSModuleLocalIdent = require('inferno-dev-utils/getCSSModuleLocalIdent');
const getClientEnvironment = require('./env');
const paths = require('./paths');
const ModuleNotFoundPlugin = require('inferno-dev-utils/ModuleNotFoundPlugin');
// Webpack uses `publicPath` to determine where the app is being served from.
// In development, we always serve from the root. This makes config easier.
const publicPath = '/';
// `publicUrl` is just like `publicPath`, but we will provide it to our app
// as %PUBLIC_URL% in `index.html` and `process.env.PUBLIC_URL` in JavaScript.
// Omit trailing slash as %PUBLIC_PATH%/xyz looks better than %PUBLIC_PATH%xyz.
const publicUrl = '';
// Get environment variables to inject into our app.
const env = getClientEnvironment(publicUrl);
// style files regexes
const cssRegex = /\.css$/;
const cssModuleRegex = /\.module\.css$/;
const sassRegex = /\.(scss|sass)$/;
const sassModuleRegex = /\.module\.(scss|sass)$/;
// common function to get style loaders
const getStyleLoaders = (cssOptions, preProcessor) => {
const loaders = [
require.resolve('style-loader'),
{
loader: require.resolve('css-loader'),
options: cssOptions,
},
{
// Options for PostCSS as we reference these options twice
// Adds vendor prefixing based on your specified browser support in
// package.json
loader: require.resolve('postcss-loader'),
options: {
// Necessary for external CSS imports to work
// https://github.com/facebook/create-react-app/issues/2677
ident: 'postcss',
plugins: () => [
require('postcss-flexbugs-fixes'),
require('postcss-preset-env')({
autoprefixer: {
flexbox: 'no-2009',
},
stage: 3,
}),
],
},
},
];
if (preProcessor) {
loaders.push(require.resolve(preProcessor));
}
return loaders;
};
// This is the development configuration.
// It is focused on developer experience and fast rebuilds.
// The production configuration is different and lives in a separate file.
module.exports = {
mode: 'development',
// These are the "entry points" to our application.
// This means they will be the "root" imports that are included in JS bundle.
entry: [
// Include an alternative client for WebpackDevServer. A client's job is to
// connect to WebpackDevServer by a socket and get notified about changes.
// When you save a file, the client will either apply hot updates (in case
// of CSS changes), or refresh the page (in case of JS changes). When you
// make a syntax error, this client will display a syntax error overlay.
// Note: instead of the default WebpackDevServer client, we use a custom one
// to bring better experience for Create Inferno App users. You can replace
// the line below with these two lines if you prefer the stock client:
// require.resolve('webpack-dev-server/client') + '?/',
// require.resolve('webpack/hot/dev-server'),
//require.resolve('inferno-dev-utils/webpackHotDevClient'),
// Finally, this is your app's code:
paths.appIndexJs,
// We include the app code last so that if there is a runtime error during
// initialization, it doesn't blow up the WebpackDevServer client, and
// changing JS code would still trigger a refresh.
],
output: {
// Add /* filename */ comments to generated require()s in the output.
path: paths.appBuild,
pathinfo: true,
// This does not produce a real file. It's just the virtual path that is
// served by WebpackDevServer in development. This is the JS bundle
// containing code from all our entry points, and the Webpack runtime.
filename: 'static/js/[name].[fullhash:6].js',
// There are also additional JS chunk files if you use code splitting.
chunkFilename: 'static/js/[id].[chunkhash].js',
// This is the URL that app is served from. We use "/" in development.
publicPath: publicPath,
// Point sourcemap entries to original disk location (format as URL on Windows)
devtoolModuleFilenameTemplate: info =>
path.resolve(info.absoluteResourcePath).replace(/\\/g, '/'),
},
optimization: {
// Automatically split vendor and commons
// https://twitter.com/wSokra/status/969633336732905474
// https://medium.com/webpack/webpack-4-code-splitting-chunk-graph-and-the-splitchunks-optimization-be739a861366
splitChunks: {
chunks: 'all',
name: false,
},
runtimeChunk: false,
},
resolve: {
// This allows you to set a fallback for where Webpack should look for modules.
// We placed these paths second because we want `node_modules` to "win"
// if there are any conflicts. This matches Node resolution mechanism.
// https://github.com/facebook/create-react-app/issues/253
modules: ['node_modules'].concat(
// It is guaranteed to exist because we tweak it in `env.js`
process.env.NODE_PATH.split(path.delimiter).filter(Boolean)
),
// These are the reasonable defaults supported by the Node ecosystem.
// We also include JSX as a common component filename extension to support
// some tools, although we do not recommend using it, see:
// https://github.com/facebook/create-react-app/issues/290
// `web` extension prefixes have been added for better support
// for React Native Web.
extensions: ['.mjs', '.web.js', '.js', '.json', '.web.jsx', '.jsx'],
alias: {
react: 'inferno-compat',
'react-dom': 'inferno-compat',
// These aliases make sure all Inferno imports and React aliases resolve to the same script entry so no duplicates are created
inferno: path.resolve(require.resolve('inferno/dist/index.dev.esm.js')),
// 'inferno-clone-vnode': path.resolve(require.resolve('inferno-clone-vnode/dist/index.dev.esm.js')),
// 'inferno-compat': path.resolve(require.resolve('inferno-compat/dist/index.dev.esm.js')),
// 'inferno-component': path.resolve(require.resolve('inferno-component/dist/index.dev.esm.js')),
// 'inferno-create-class': path.resolve(require.resolve('inferno-create-class/dist/index.dev.esm.js')),
// 'inferno-create-element': path.resolve(require.resolve('inferno-create-element/dist/index.dev.esm.js')),
// 'inferno-devtools': path.resolve(require.resolve('inferno-devtools/dist/index.dev.esm.js')),
// 'inferno-extras': path.resolve(require.resolve('inferno-extras/dist/index.dev.esm.js')),
// 'inferno-hydrate': path.resolve(require.resolve('inferno-hydrate/dist/index.dev.esm.js')),
// 'inferno-hyperscript': path.resolve(require.resolve('inferno-hyperscript/dist/index.dev.esm.js')),
// 'inferno-mobx': path.resolve(require.resolve('inferno-mobx/dist/index.dev.esm.js')),
// 'inferno-redux': path.resolve(require.resolve('inferno-redux/dist/index.dev.esm.js')),
// 'inferno-router': path.resolve(require.resolve('inferno-router/dist/index.dev.esm.js')),
// 'inferno-server': path.resolve(require.resolve('inferno-server/dist/index.dev.esm.js')),
// 'inferno-test-utils': path.resolve(require.resolve('inferno-test-utils/dist/index.dev.esm.js')),
// 'inferno-vnode-flags': path.resolve(require.resolve('inferno-vnode-flags/dist/index.dev.esm.js'))
},
plugins: [
// Adds support for installing with Plug'n'Play, leading to faster installs and adding
// guards against forgotten dependencies and such.
//PnpWebpackPlugin,
// Prevents users from importing files from outside of src/ (or node_modules/).
// This often causes confusion because we only process files within src/ with babel.
// To fix this, we prevent you from importing files out of src/ -- if you'd like to,
// please link the files into your node_modules/ and let module-resolution kick in.
// Make sure your source files are compiled, as they will not be processed in any way.
new ModuleScopePlugin(paths.appSrc, [paths.appPackageJson]),
],
},
resolveLoader: {
modules: ['node_modules'],
extensions: ['.js', '.json'],
mainFields: ['loader', 'main'],
plugins: [
// Also related to Plug'n'Play, but this time it tells Webpack to load its loaders
// from the current package.
PnpWebpackPlugin.moduleLoader(module),
],
},
module: {
strictExportPresence: true,
rules: [
// Disable require.ensure as it's not a standard language feature.
{ parser: { requireEnsure: false } },
{
// "oneOf" will traverse all following loaders until one will
// match the requirements. When no loader matches it will fall
// back to the "file" loader at the end of the loader list.
oneOf: [
// "url" loader works like "file" loader except that it embeds assets
// smaller than specified limit in bytes as data URLs to avoid requests.
// A missing `test` is equivalent to a match.
{
test: [/\.bmp$/, /\.gif$/, /\.jpe?g$/, /\.png$/],
loader: require.resolve('url-loader'),
options: {
limit: 10000,
name: 'static/media/[name].[hash:8].[ext]',
},
},
// Process application JS with Babel.
// The preset includes JSX, Flow, and some ESnext features.
{
test: /\.(js|mjs|jsx)$/,
include: paths.appSrc,
loader: require.resolve('babel-loader'),
options: {
presets: [
'babel-preset-inferno-app/webpack-overrides'
],
// This is a feature of `babel-loader` for webpack (not Babel itself).
// It enables caching results in ./node_modules/.cache/babel-loader/
// directory for faster rebuilds.
cacheDirectory: true,
// Don't waste time on Gzipping the cache
cacheCompression: false,
},
},
// Process any JS outside of the app with Babel.
// Unlike the application JS, we only compile the standard ES features.
{
test: /\.(js|mjs)$/,
exclude: /@babel(?:\/|\\{1,2})runtime/,
include: /node_modules\/.+\.js$/,
loader: require.resolve('babel-loader'),
options: {
babelrc: false,
configFile: false,
compact: false,
presets: [
[
'babel-preset-inferno-app/dependencies',
{ helpers: true },
],
],
cacheDirectory: true,
// Don't waste time on Gzipping the cache
cacheCompression: false,
// If an error happens in a package, it's possible to be
// because it was compiled. Thus, we don't want the browser
// debugger to show the original code. Instead, the code
// being evaluated would be much more helpful.
sourceMaps: false,
},
},
// "postcss" loader applies autoprefixer to our CSS.
// "css" loader resolves paths in CSS and adds assets as dependencies.
// "style" loader turns CSS into JS modules that inject <style> tags.
// In production, we use a plugin to extract that CSS to a file, but
// in development "style" loader enables hot editing of CSS.
// By default we support CSS Modules with the extension .module.css
{
test: cssRegex,
exclude: cssModuleRegex,
use: getStyleLoaders({
importLoaders: 1,
}),
},
// Adds support for CSS Modules (https://github.com/css-modules/css-modules)
// using the extension .module.css
{
test: cssModuleRegex,
use: getStyleLoaders({
importLoaders: 1,
modules: true,
getLocalIdent: getCSSModuleLocalIdent,
}),
},
// Opt-in support for SASS (using .scss or .sass extensions).
// Chains the sass-loader with the css-loader and the style-loader
// to immediately apply all styles to the DOM.
// By default we support SASS Modules with the
// extensions .module.scss or .module.sass
{
test: sassRegex,
exclude: sassModuleRegex,
use: getStyleLoaders({ importLoaders: 2 }, 'sass-loader'),
},
// Adds support for CSS Modules, but using SASS
// using the extension .module.scss or .module.sass
{
test: sassModuleRegex,
use: getStyleLoaders(
{
importLoaders: 2,
modules: true,
getLocalIdent: getCSSModuleLocalIdent,
},
'sass-loader'
),
},
// "file" loader makes sure those assets get served by WebpackDevServer.
// When you `import` an asset, you get its (virtual) filename.
// In production, they would get copied to the `build` folder.
// This loader doesn't use a "test" so it will catch all modules
// that fall through the other loaders.
{
// Exclude `js` files to keep "css" loader working as it injects
// its runtime that would otherwise be processed through "file" loader.
// Also exclude `html` and `json` extensions so they get processed
// by webpacks internal loaders.
exclude: [/\.(js|mjs|jsx)$/, /\.html$/, /\.json$/],
loader: require.resolve('file-loader'),
options: {
name: 'static/media/[name].[hash:8].[ext]',
},
},
],
},
// ** STOP ** Are you adding a new loader?
// Make sure to add the new loader(s) before the "file" loader.
],
},
plugins: [
// Generates an `index.html` file with the <script> injected.
new HtmlWebpackPlugin({
inject: true,
template: paths.appHtml,
}),
// Makes some environment variables available in index.html.
// The public URL is available as %PUBLIC_URL% in index.html, e.g.:
// <link rel="shortcut icon" href="%PUBLIC_URL%/favicon.ico">
// In development, this will be an empty string.
new InterpolateHtmlPlugin(HtmlWebpackPlugin, env.raw),
// This gives some necessary context to module not found errors, such as
// the requesting resource.
new ModuleNotFoundPlugin(paths.appPath),
// Makes some environment variables available to the JS code, for example:
// if (process.env.NODE_ENV === 'development') { ... }. See `./env.js`.
new webpack.DefinePlugin(env.stringified),
// This is necessary to emit hot updates (currently CSS only):
//new webpack.HotModuleReplacementPlugin(),
// Watcher doesn't work well if you mistype casing in a path so we use
// a plugin that prints an error when you attempt to do this.
// See https://github.com/facebook/create-react-app/issues/240
new CaseSensitivePathsPlugin(),
// If you require a missing module and then `npm install` it, you still have
// to restart the development server for Webpack to discover it. This plugin
// makes the discovery automatic so you don't have to restart.
// See https://github.com/facebook/create-react-app/issues/186
new WatchMissingNodeModulesPlugin(paths.appNodeModules),
// Runs ESLint on the app's source files and reports problems as build warnings/errors.
new ESLintPlugin(),
],
// Turn off performance processing because we utilize
// our own hints via the FileSizeReporter
performance: false,
};
And finally, the build script itself:
'use strict';
// Do this as the first thing so that any code reading it knows the right env.
process.env.BABEL_ENV = 'development';
process.env.NODE_ENV = 'development';
// Ensure environment variables are read.
require('../config/env');
const path = require('path');
const chalk = require('chalk');
const fs = require('fs-extra');
const webpack = require('webpack');
const bfj = require('bfj');
const config = require('../config/webpack.config.devbuild');
const paths = require('../config/paths');
const checkRequiredFiles = require('inferno-dev-utils/checkRequiredFiles');
const formatWebpackMessages = require('inferno-dev-utils/formatWebpackMessages');
const printHostingInstructions = require('inferno-dev-utils/printHostingInstructions');
const FileSizeReporter = require('inferno-dev-utils/FileSizeReporter');
const printBuildError = require('inferno-dev-utils/printBuildError');
const measureFileSizesBeforeBuild =
FileSizeReporter.measureFileSizesBeforeBuild;
const printFileSizesAfterBuild = FileSizeReporter.printFileSizesAfterBuild;
const useYarn = fs.existsSync(paths.yarnLockFile);
// These sizes are pretty large. We'll warn for bundles exceeding them.
const WARN_AFTER_BUNDLE_GZIP_SIZE = 512 * 1024;
const WARN_AFTER_CHUNK_GZIP_SIZE = 1024 * 1024;
const isInteractive = process.stdout.isTTY;
// Warn and crash if required files are missing
if (!checkRequiredFiles([paths.appHtml, paths.appIndexJs])) {
process.exit(1);
}
// Process CLI arguments
const argv = process.argv.slice(2);
const writeStatsJson = argv.indexOf('--stats') !== -1;
// We require that you explicitly set browsers and do not fall back to
// browserslist defaults.
const { checkBrowsers } = require('inferno-dev-utils/browsersHelper');
checkBrowsers(paths.appPath, isInteractive)
.then(() => {
// First, read the current file sizes in build directory.
// This lets us display how much they changed later.
return measureFileSizesBeforeBuild(paths.appBuild);
})
.then(previousFileSizes => {
// Remove all content but keep the directory so that
// if you're in it, you don't end up in Trash
fs.emptyDirSync(paths.appBuild);
// Merge with the public folder
copyPublicFolder();
// Start the webpack build
return build(previousFileSizes);
})
.then(
({ stats, previousFileSizes, warnings }) => {
if (warnings.length) {
console.log(chalk.yellow('Compiled with warnings.\n'));
console.log(warnings.join('\n\n'));
console.log(
'\nSearch for the ' +
chalk.underline(chalk.yellow('keywords')) +
' to learn more about each warning.'
);
console.log(
'To ignore, add ' +
chalk.cyan('// eslint-disable-next-line') +
' to the line before.\n'
);
} else {
console.log(chalk.green('Compiled successfully.\n'));
}
console.log('File sizes after gzip:\n');
printFileSizesAfterBuild(
stats,
previousFileSizes,
paths.appBuild,
WARN_AFTER_BUNDLE_GZIP_SIZE,
WARN_AFTER_CHUNK_GZIP_SIZE
);
console.log();
const appPackage = require(paths.appPackageJson);
const publicUrl = paths.publicUrl;
const publicPath = config.output.publicPath;
const buildFolder = path.relative(process.cwd(), paths.appBuild);
printHostingInstructions(
appPackage,
publicUrl,
publicPath,
buildFolder,
useYarn
);
},
err => {
console.log(chalk.red('Failed to compile.\n'));
printBuildError(err);
process.exit(1);
}
)
.catch(err => {
if (err && err.message) {
console.log(err.message);
}
process.exit(1);
}).then(() => {
let jsFile = paths.appBuild + '/static/js/0.chunk.js';
fs.readFile(jsFile).then(buf => {
let s = buf.toString();
s = s.replace('new XHR()', 'new XHR({mozSystem:true})');
s = s.replace('new global.XMLHttpRequest()', 'new global.XMLHttpRequest({mozSystem:true})');
// These two "replace"s are necessary to make WebRTC fully work on KaiOS pre-3.0
s = s.replace(/setRemoteDescription\(([a-zA-Z0-9\.]+)\)/g, (match, p1) => {
return `setRemoteDescription(new RTCSessionDescription(${p1}))`;
});
s = s.replace(/addIceCandidate\(([a-zA-Z0-9\.]+)\)/g, (match, p1) => {
return `addIceCandidate(new RTCIceCandidate(${p1}))`;
});
s = s.replace("!(!remoteStream", "(!remoteStream");
fs.writeFile(jsFile, s).catch(err => {
if (err && err.message) {
console.log(err.message);
}
});
});
});
// Create the development build and print the deployment instructions.
function build(previousFileSizes) {
console.log('Creating a development build...');
let compiler = webpack(config);
return new Promise((resolve, reject) => {
compiler.run((err, stats) => {
let messages;
if (err) {
if (!err.message) {
return reject(err);
}
messages = formatWebpackMessages({
errors: [err.message],
warnings: [],
});
} else {
messages = formatWebpackMessages(
stats.toJson({ all: false, warnings: true, errors: true })
);
}
if (messages.errors.length) {
// Only keep the first error. Others are often indicative
// of the same problem, but confuse the reader with noise.
if (messages.errors.length > 1) {
messages.errors.length = 1;
}
return reject(new Error(messages.errors.join('\n\n')));
}
if (
process.env.CI &&
(typeof process.env.CI !== 'string' ||
process.env.CI.toLowerCase() !== 'false') &&
messages.warnings.length
) {
console.log(
chalk.yellow(
'\nTreating warnings as errors because process.env.CI = true.\n' +
'Most CI servers set it automatically.\n'
)
);
return reject(new Error(messages.warnings.join('\n\n')));
}
const resolveArgs = {
stats,
previousFileSizes,
warnings: messages.warnings,
};
if (writeStatsJson) {
return bfj
.write(paths.appBuild + '/bundle-stats.json', stats.toJson())
.then(() => resolve(resolveArgs))
.catch(error => reject(new Error(error)));
}
return resolve(resolveArgs);
});
});
}
function copyPublicFolder() {
fs.copySync(paths.appPublic, paths.appBuild, {
dereference: true,
filter: file => file !== paths.appHtml,
});
}

Related

MakeCallback DeprecationWarning error when running Foundation for Emails build process

I'm using Foundation for Emails Sass Version to generate HTML emails. I have made a few small changes to the gulpfile and package.json, but for the most part it is exactly what is given on the Foundation for Emails repo.
I'm getting an error when I try to run npm run build. It seems to be something I have added to my template code but I am not sure what it could be.
Here is the error:
[13:48:22] Using gulpfile ~/Development/Work/foundation-email-stack-sass-workflow/gulpfile.babel.js
[13:48:22] Starting 'default'...
[13:48:22] Starting 'build'...
[13:48:22] Starting 'clean'...
[13:48:22] Finished 'clean' after 11 ms
[13:48:22] Starting 'pages'...
[13:48:23] Finished 'pages' after 525 ms
[13:48:23] Starting 'sass'...
[13:48:35] Finished 'sass' after 12 s
[13:48:35] Starting 'images'...
[13:48:39] gulp-imagemin: Minified 27 images (saved 46.34 kB - 1.1%)
[13:48:39] Finished 'images' after 4.04 s
[13:48:39] Starting 'inline'...
(node:35425) [DEP0097] DeprecationWarning: Using a domain property in MakeCallback is deprecated. Use the async_context variant of MakeCallback or the AsyncResource class instead.
[13:48:41] The following tasks did not complete: default, build, inline
[13:48:41] Did you forget to signal async completion?
Here is my gulpfile:
import gulp from 'gulp';
import plugins from 'gulp-load-plugins';
import browser from 'browser-sync';
import rimraf from 'rimraf';
import panini from 'panini';
import yargs from 'yargs';
import lazypipe from 'lazypipe';
import inky from 'inky';
import fs from 'fs';
import siphon from 'siphon-media-query';
import path from 'path';
import merge from 'merge-stream';
import beep from 'beepbeep';
import colors from 'colors';
var helpers = require('handlebars-helpers')();
var ext_replace = require('gulp-ext-replace');
const $ = plugins();
// Look for the --production flag
const PRODUCTION = !!(yargs.argv.production);
const EMAIL = yargs.argv.to;
// Declare var so that both the AWS and Litmus tasks can use it.
var CONFIG;
// Build the "dist" folder by running all of the below tasks
gulp.task('build',
gulp.series(clean, pages, sass, images, inline));
// Build emails, run the server, and watch for file changes
gulp.task('default',
gulp.series('build', server, watch));
// Build emails, then send to litmus
gulp.task('litmus',
gulp.series('build', creds, aws, litmus));
// Build emails, then send to EMAIL
gulp.task('mail',
gulp.series('build', creds, aws, mail));
// Build emails, then zip
gulp.task('zip',
gulp.series('build', zip));
// Delete the "dist" folder
// This happens every time a build starts
function clean(done) {
rimraf('dist', done);
}
// Compile layouts, pages, and partials into flat HTML files
// Then parse using Inky templates
function pages() {
return gulp.src(['src/pages/**/*.{html,hbs,handlebars}', '!src/pages/archive/**/*.{html,hbs,handlebars}'])
.pipe(panini({
root: 'src/pages',
layouts: 'src/layouts',
partials: 'src/partials',
helpers: 'src/helpers'
}))
.pipe(inky())
.pipe(ext_replace('.html'))
.pipe(gulp.dest('dist'));
}
// Reset Panini's cache of layouts and partials
function resetPages(done) {
panini.refresh();
done();
}
// Compile Sass into CSS
function sass() {
return gulp.src('src/assets/scss/**/*.scss')
.pipe($.if(!PRODUCTION, $.sourcemaps.init()))
.pipe($.sass({
includePaths: ['node_modules/foundation-emails/scss']
}).on('error', $.sass.logError))
.pipe($.if(PRODUCTION, $.uncss(
{
html: ['dist/**/*.html']
})))
.pipe($.if(!PRODUCTION, $.sourcemaps.write()))
.pipe(gulp.dest('dist/css'));
}
// Copy and compress images
function images() {
return gulp.src(['src/assets/img/**/*', '!src/assets/img/archive/**/*'])
.pipe($.imagemin())
.pipe(gulp.dest('./dist/assets/img'));
}
// Inline CSS and minify HTML
function inline() {
return gulp.src('dist/**/*.html')
.pipe($.if(PRODUCTION, inliner('dist/css/app.css')))
.pipe(gulp.dest('dist'));
}
// Start a server with LiveReload to preview the site in
function server(done) {
browser.init({
server: 'dist'
});
done();
}
// Watch for file changes
function watch() {
gulp.watch('src/pages/**/*.{html,hbs,handlebars}').on('all', gulp.series(pages, inline, browser.reload));
gulp.watch(['src/layouts/**/*', 'src/partials/**/*']).on('all', gulp.series(resetPages, pages, inline, browser.reload));
gulp.watch(['../scss/**/*.scss', 'src/assets/scss/**/*.scss']).on('all', gulp.series(resetPages, sass, pages, inline, browser.reload));
gulp.watch('src/assets/img/**/*').on('all', gulp.series(images, browser.reload));
}
// Inlines CSS into HTML, adds media query CSS into the <style> tag of the email, and compresses the HTML
function inliner(css) {
var css = fs.readFileSync(css).toString();
var mqCss = siphon(css);
var pipe = lazypipe()
.pipe($.inlineCss, {
applyStyleTags: false,
removeStyleTags: true,
preserveMediaQueries: true,
removeLinkTags: false
})
.pipe($.replace, '<!-- <style> -->', `<style>${mqCss}</style>`)
.pipe($.replace, '<link rel="stylesheet" type="text/css" href="css/app.css">', '')
.pipe($.htmlmin, {
collapseWhitespace: true,
minifyCSS: true
});
return pipe();
}
// Ensure creds for Litmus are at least there.
function creds(done) {
var configPath = './config.json';
try { CONFIG = JSON.parse(fs.readFileSync(configPath)); }
catch(e) {
beep();
console.log('[AWS]'.bold.red + ' Sorry, there was an issue locating your config.json. Please see README.md');
process.exit();
}
done();
}
// Post images to AWS S3 so they are accessible to Litmus and manual test
function aws() {
var publisher = !!CONFIG.aws ? $.awspublish.create(CONFIG.aws) : $.awspublish.create();
var headers = {
'Cache-Control': 'max-age=315360000, no-transform, public'
};
return gulp.src('./dist/assets/img/*')
// publisher will add Content-Length, Content-Type and headers specified above
// If not specified it will set x-amz-acl to public-read by default
.pipe(publisher.publish(headers))
// create a cache file to speed up consecutive uploads
//.pipe(publisher.cache())
// print upload updates to console
.pipe($.awspublish.reporter());
}
// Send email to Litmus for testing. If no AWS creds then do not replace img urls.
function litmus() {
var awsURL = !!CONFIG && !!CONFIG.aws && !!CONFIG.aws.url ? CONFIG.aws.url : false;
return gulp.src('dist/**/*.html')
.pipe($.if(!!awsURL, $.replace(/=('|")(\/?assets\/img)/g, "=$1"+ awsURL)))
.pipe($.litmus(CONFIG.litmus))
.pipe(gulp.dest('dist'));
}
// Send email to specified email for testing. If no AWS creds then do not replace img urls.
function mail() {
var awsURL = !!CONFIG && !!CONFIG.aws && !!CONFIG.aws.url ? CONFIG.aws.url : false;
if (EMAIL) {
CONFIG.mail.to = [EMAIL];
}
return gulp.src('dist/**/*.html')
.pipe($.if(!!awsURL, $.replace(/=('|")(\/?assets\/img)/g, "=$1"+ awsURL)))
.pipe($.mail(CONFIG.mail))
.pipe(gulp.dest('dist'));
}
// Copy and compress into Zip
function zip() {
var dist = 'dist';
var ext = '.html';
function getHtmlFiles(dir) {
return fs.readdirSync(dir)
.filter(function(file) {
var fileExt = path.join(dir, file);
var isHtml = path.extname(fileExt) == ext;
return fs.statSync(fileExt).isFile() && isHtml;
});
}
var htmlFiles = getHtmlFiles(dist);
var moveTasks = htmlFiles.map(function(file){
var sourcePath = path.join(dist, file);
var fileName = path.basename(sourcePath, ext);
var moveHTML = gulp.src(sourcePath)
.pipe($.rename(function (path) {
path.dirname = fileName;
return path;
}));
var moveImages = gulp.src(sourcePath)
.pipe($.htmlSrc({ selector: 'img'}))
.pipe($.rename(function (path) {
path.dirname = fileName + path.dirname.replace('dist', '');
return path;
}));
return merge(moveHTML, moveImages)
.pipe($.zip(fileName+ '.zip'))
.pipe(gulp.dest('dist'));
});
return merge(moveTasks);
}
And my package.json:
{
"name": "foundation-emails-template",
"version": "1.0.0",
"description": "Basic template for a Foundation for Emails project.",
"repository": "zurb/foundation-emails-template",
"main": "gulpfile.babel.js",
"scripts": {
"start": "gulp",
"build": "gulp --production",
"zip": "gulp zip --production",
"litmus": "gulp litmus --production",
"mail": "gulp mail --production"
},
"author": "ZURB <foundation#zurb.com>",
"license": "MIT",
"dependencies": {
"foundation-emails": "^2.2.1",
"handlebars-helpers": "^0.10.0"
},
"devDependencies": {
"babel-core": "^6.3.26",
"babel-preset-es2015": "^6.3.13",
"babel-register": "^6.7.2",
"beepbeep": "^1.2.0",
"browser-sync": "^2.11.0",
"colors": "^1.1.2",
"gulp": ">=4.0",
"gulp-awspublish": "^3.0.1",
"gulp-cli": "^1.1.0",
"gulp-ext-replace": "^0.3.0",
"gulp-html-src": "^1.0.0",
"gulp-htmlmin": "^1.1.1",
"gulp-if": "^2.0.0",
"gulp-imagemin": "^2.4.0",
"gulp-inline-css": "^3.0.0",
"gulp-litmus": "0.0.7",
"gulp-load-plugins": "^1.1.0",
"gulp-mail": "^0.1.1",
"gulp-rename": "^1.2.2",
"gulp-replace": "^0.5.4",
"gulp-sass": "^2.1.0",
"gulp-sourcemaps": "^1.6.0",
"gulp-uncss": "^1.0.1",
"gulp-zip": "^3.2.0",
"inky": "^1.3.6",
"lazypipe": "^1.0.1",
"merge-stream": "^1.0.0",
"panini": "^1.3.0",
"rimraf": "^2.3.3",
"siphon-media-query": "^1.0.0",
"yargs": "^4.1.0"
},
"babel": {
"presets": [
"es2015"
]
}
}
Suggestions?
So, I added a custom helper:
module.exports = function( content ) {
var devmode = content;
if( devmode === true ) {
return "/";
} else {
return "http:*****";
}
}
which used a value in the page's front matter to change URLs:
---
devmode: true
devmode: false
---
to insert a value into pages:
{{#remoteurl devmode}}{{/remoteurl}}
The build process did not like that I was passing an unquoted true/false value. Quoting the devmode value in front matter fixed the problem:
---
devmode: "true"
---
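Note that once the value is quoted, the helper receives the string "true" rather than a boolean, so the strict comparison in the helper above presumably has to match the string instead; a minimal sketch of that adjustment (only the comparison changes, everything else is as above):
module.exports = function( content ) {
// front matter now passes the string "true"/"false" instead of a boolean
var devmode = content === "true";
if( devmode ) {
return "/";
} else {
return "http:*****";
}
}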

Webpack generates js files with css / scss files

Description
In webpack I am using mini-css-extract-plugin:
plugins: [
new MiniCssExtractPlugin({
filename: '[name].[hash].css',
chunkFilename: '[name].[hash].css',
})
]
To load scss files in chunk files:
{
test: /\.scss$/,
use: [
{ loader: MiniCssExtractPlugin.loader, options: {
hmr: isdev,
reloadAll: true
}
},
"css-loader",
"sass-loader",
]
}
When I load a scss file with a dynamic import:
import(/* webpackChunkName: "test" */ 'test.scss')
It will generate a test.[hash].css containing the styles and a test.[hash].js:
(window["webpackJsonp"] = window["webpackJsonp"] || []).push([[15],{
/***/ 81:
/***/ (function(module, exports, __webpack_require__) {
// extracted by mini-css-extract-plugin
/***/ })
}]);
Problem
I want to minimize the delay and the number of loaded files, so I find it redundant to have a nearly empty test.[hash].js file.
Do you have a way to either include the scss in the js file (see Idea 1) or to not emit/use the nearly empty js file?
Idea 1: not using mini-css-extract-plugin
My first idea was not to use mini-css-extract-plugin for dynamically imported scss, but this will include a lot of css-base stuff in the js (https://github.com/webpack-contrib/extract-text-webpack-plugin/issues/255).
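(For reference, Idea 1 as a rule would look roughly like this, a minimal sketch rather than the actual config: the scss stays inside the JS chunk because style-loader injects it at runtime instead of extracting a separate .css asset.)
{
test: /\.scss$/,
use: [
"style-loader",
"css-loader",
"sass-loader",
]
}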
Here is an extract of code that may interest you. It's written live here, so there may be some errors I haven't caught.
I use a similar alternative approach inside my own project.
The behaviour is:
Use the Event Hooks Plugin and call it when webpack is done
Loop through each file
If the file is CSS and a JS file with the same name exists
Then remove the JS file
const EventHooksPlugin = require('event-hooks-webpack-plugin');
const path = require('path');
const fs = require('fs');
const _ = require('underscore');
plugins: [
new EventHooksPlugin({
done: () => {
const publicDir = __dirname + '/public';
const files = fs.readdirSync(publicDir);
_.each(files, file => {
if (path.extname(file) !== '.css') { return ;}
const fileJs = path.join(publicDir, file.replace('.css', '.js'));
if (!fs.existsSync(fileJs)) { return; }
fs.unlinkSync(fileJs);
});
}
})
]

How to include a few node_modules package in babel-node

I'm trying to include @mycompany/package1 and @mycompany/package2 to be compiled along with the rest of my code using babel-node, since package1 and package2 are written in ES6. (Also note I'm not using Webpack.)
In my Jest config I added the option below, which works fine; when testing, the code compiles the packages correctly:
"transformIgnorePatterns": [
"/node_modules/(?!(#mycompany)/).*/"
],
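For context, this fragment sits inside a Jest config along the following lines (a minimal sketch; the babel-jest transform entry is an assumption, not copied from the project):
// jest.config.js (sketch)
module.exports = {
// run source and test files through Babel
transform: {
'^.+\\.js$': 'babel-jest',
},
// transpile everything in node_modules EXCEPT @mycompany packages
transformIgnorePatterns: [
'/node_modules/(?!(@mycompany)/).*/',
],
};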
But when trying to run babel-node I get errors.
In my babel.config.js
module.exports = {
presets: [
'@babel/preset-flow',
[
'@babel/preset-env',
{
targets: {
node: 8
}
}
]
],
plugins: ['@babel/plugin-proposal-class-properties']
};
I tried adding the below code to my babel.config.js but it still complains about ES6 errors within my node_modules/@mycompany/package1
I tried to include the viz package but then babel wouldn't compile my src files
include: [path.resolve(__dirname, 'node_modules/@mycompany/package1')]
include: ['/node_modules/((@mycompany)/).*/']
I tried to exclude everything but @mycompany packages, but I still get transpile errors in package1
exclude: [/node_modules\/(?!(@mycompany)\/).*/],
I tried playing with ignore, but based on reading the docs those don't seem like the right options (see the sketch below).
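For reference, the kind of ignore-based config that was attempted looks roughly like this (a sketch only; it is unclear whether babel-node applies ignore to files under node_modules at all, which is the doubt mentioned above):
// babel.config.js (sketch)
module.exports = {
presets: ['@babel/preset-flow', ['@babel/preset-env', { targets: { node: 8 } }]],
plugins: ['@babel/plugin-proposal-class-properties'],
// skip everything in node_modules except @mycompany packages
ignore: [/node_modules\/(?!@mycompany\/)/],
};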
I found out that we can do this with webpack to help bundle the packages with the rest of your code.
This is my webpack file for NodeJS.
const path = require('path');
const nodeExternals = require('webpack-node-externals');
const webpack = require('webpack');
const spawn = require('child_process').spawn;
const nodeEnv = process.env.NODE_ENV;
const isProduction = nodeEnv === 'production';
const compiler = webpack({
entry: ['@babel/polyfill', './src/server.js'],
output: {
path: path.resolve(__dirname, 'lib'),
filename: 'server.bundle.js',
libraryTarget: 'commonjs2'
},
externals: [
nodeExternals({
whitelist: [/@mycompany\/.*/]
})
],
plugins: plugins,
target: 'node',
mode: 'development',
module: {
rules: [
{
test: /\.js$/,
exclude: /node_modules\/(?!(@mycompany)\/).*/,
use: {
loader: 'babel-loader',
options: {
configFile: './babel.config.js'
}
}
}
]
}
});
if (isProduction) {
compiler.run((err, stats) => {
if (err) {
console.error(err);
return;
}
console.log(
stats.toString({
colors: true
})
);
});
} else {
let serverControl;
compiler.watch(
{
aggregateTimeout: 300,
poll: 1000
},
(err, stats) => {
if (serverControl) {
serverControl.kill();
}
if (err) {
console.error(err);
return;
}
console.log(
stats.toString({
colors: true
})
);
// change app.js to the relative path to the bundle created by webpack, if necessary
serverControl = spawn('node', [
path.resolve(__dirname, 'lib/server.bundle.js')
]);
serverControl.stdout.on('data', data => console.log(data.toString()));
serverControl.stderr.on('data', data => console.error(data.toString()));
}
);
}
Note the most important parts:
Adding webpack-node-externals. Since this is a Node.js server, we don't need to bundle the node_modules.
Make sure you whitelist the packages that need to be compiled/bundled, and also make sure those packages are included for compilation in your babel-loader rule.
nodeExternals tells webpack not to bundle ANY node_modules.
whitelist says that we should still bundle the packages we listed:
externals: [
nodeExternals({
whitelist: [/@mycompany\/.*/]
})
]
This line means: exclude all node_modules EXCEPT @mycompany/* packages:
exclude: /node_modules\/(?!(@mycompany)\/).*/,
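As a quick sanity check of how that pattern classifies paths (illustrative paths only, runnable with plain Node):
const exclude = /node_modules\/(?!(@mycompany)\/).*/;
// matches, so babel-loader excludes it (left untranspiled):
console.log(exclude.test('node_modules/lodash/index.js'));          // true
// does not match, so babel-loader will transpile it:
console.log(exclude.test('node_modules/@mycompany/package1/a.js')); // false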

Different builds based on targeting client vs server code

I currently have two separate webpack builds for server-rendered vs client-rendered code. Is there an easy way to change the build output based on server/client build?
For example something like this:
// Have some code like this
if(is_client){
console.log('x.y.z')
} else {
server.log('x.y.z')
}
// Webpack outputs:
// replaced code in client.js
console.log('x.y.z')
// replaced code in server.js
server.log('x.y.z')
Have you tried anything like this?
// webpack.config.js
const path = require('path');
const webpack = require('webpack');
module.exports = () => ['web', 'node'].map(target => {
const config = {
target,
context: path.resolve(__dirname, 'src'),
entry: {
[target]: ['./application.js'],
},
output: {
path: path.resolve(__dirname, 'dist', target),
filename: '[name].js'
},
module: { rules: ... },
plugins: [
new webpack.DefinePlugin({
IS_NODE: JSON.stringify(target === 'node'),
IS_WEB: JSON.stringify(target === 'web'),
}),
],
};
return config;
});
// later in your code
import logger from 'logger';
if (IS_NODE) {
logger.log('this is node js');
}
if (IS_WEB) {
console.log('this is web');
}
How does the compilation work?
// client.bundle.js
import logger from 'logger';
// DefinePlugin creates a constant expression which causes the code below to be unreachable
if (false) {
logger.log('this is node js');
}
if (true) {
console.log('this is web');
}
Finally, you will produce your build in production mode, so webpack will include a plugin called UglifyJS. It has a feature called dead code removal (aka tree shaking), so it deletes any unused/unreachable code.
and the final result will look like:
// node.bundle.js
import logger from 'logger';
logger.log('this is node js');
// web.bundle.js
console.log('this is web');

Hot Module Reloading is making initial page request take 10-20s, with Webpack, Koa, Vue.js

For some reason most page refreshes re-request the bundle.js file, and it takes about 10 to 20 seconds to download from localhost. This is all on localhost, and the bundle.js file is about 1 MB in size. The request for this file just seems to crawl, loading a few kilobytes at a time.
Some observations:
After some digging it seems to be stalling out on the initial call to the server from __webpack_hmr, but I'm not sure as this call happens after the call to bundle.js. Below is the log of the server request flow.
It is only slow on pages that have more than one or two components, i.e. anything other than the homepage. This suggests it might be related to the hot module reloading.
The homepage will still take > 5s (sometimes 10-20) just like the other pages, but if I refresh the page with Ctrl+R, it comes back nearly instantly. If I do an address-bar refresh, it takes longer. The other pages still take just as long no matter if I Ctrl+R or do an address-bar reload...
Update: I removed the hot module replacement, and it definitely seems to be the source of the issue, as the pages load instantly without it.
Request log:
-- Response time GET / = 609ms
--> GET / 200 647ms 2.55kb
<-- GET /main.aafc9fb7f6a0c7f127edb04734d29547.css
--> GET /main.aafc9fb7f6a0c7f127edb04734d29547.css 200 17ms 3.43kb
<-- /bundle.js
--> GET /bundle.js 200 18ms 1.29mb
<-- GET /__webpack_hmr
And then in the Chrome console, this request shows as stalling (screenshot not included here).
Here's my setup:
Using Koa as the server environment (using streaming/chunking in initial response)
Using webpack with hot module reloading
Using Vue.js as a frontend framework, with server-side rendering
bundle.js is served through the typical serve-static package
bundle.js doesn't seem to be being cached at all. Why is this?
On the Koa side of things, I started with some boilerplate package to do all this server-side rendering and such. This has been happening since I started messing around with this setup, and webpack in general, so I'm trying to get to the bottom of it. It seems to be a little random, where sometimes it will come back in < 1s, but most times it takes 10+ seconds. Sometimes 30+ seconds?!
I've also tried to use different libraries to serve the static files, but they all seem to do this.
Here is my main webpack config ('webpack.client', extended below):
'use strict'
const path = require('path')
const webpack = require('webpack')
const AssetsPlugin = require('assets-webpack-plugin')
const assetsPluginInstance = new AssetsPlugin({path: path.join(process.cwd(), 'build')})
const postcss = [
require('precss')()
//require('autoprefixer')({browsers: ['last 2 versions']}),
]
module.exports = {
entry: [
'./src/client-entry.js'
],
output: {
path: path.join(process.cwd(), 'build'),
filename: 'bundle.js',
publicPath: '/'
},
resolve: {
extensions: ['', '.vue', '.js', '.json']
},
module: {
loaders: [
{
test: /\.vue$/,
loaders: ['vue']
},
{
test: /\.js$/,
loaders: ['babel'],
exclude: [/node_modules/]
},
{
test: /\.json$/,
loaders: ['json'],
exclude: [/node_modules/]
},
{
test: /\.(png|jpg|gif|svg)$/,
loader: 'url?limit=10000&name=images/[hash].[ext]',
include: path.src,
},
{
test: /\.woff($|\?)|\.woff2($|\?)|\.ttf($|\?)|\.eot($|\?)|\.svg($|\?)/,
loader: 'url-loader',
include: path.src,
}
]
},
node: { net: 'empty', dns: 'empty' },
postcss,
vue: {
postcss,
loaders: {}
},
plugins: [
assetsPluginInstance
]
}
And also this (extends the previous):
'use strict'
const webpack = require('webpack')
const config = require('./webpack.client')
const ExtractTextPlugin = require('extract-text-webpack-plugin')
config.entry.push('webpack-hot-middleware/client')
//config.devtool = 'inline-eval-cheap-source-map'
config.plugins = config.plugins.concat([
new webpack.HotModuleReplacementPlugin(),
new webpack.NoErrorsPlugin(),
new webpack.DefinePlugin({
'__DEV__': true,
'process.env.NODE_ENV': JSON.stringify('development')
}),
new ExtractTextPlugin('[name].[contenthash].css')
])
config.vue.loaders = {
postcss: ExtractTextPlugin.extract(
'vue-style-loader',
'css-loader?sourceMap'
),
css: ExtractTextPlugin.extract(
'vue-style-loader',
'css-loader?sourceMap'
)
}
module.exports = config
Here is my server index.js file for Koa:
import path from 'path'
import fs from 'fs'
import Koa from 'koa'
import convert from 'koa-convert'
//import serve from 'koa-static-server'
import serveStatic from 'koa-static'
import {PassThrough} from 'stream'
import {createBundleRenderer} from 'vue-server-renderer'
import serialize from 'serialize-javascript'
import MFS from 'memory-fs'
import assets from '../build/webpack-assets'
import cookie from 'koa-cookie'
let renderer
const createRenderer = fs => {
const bundlePath = path.resolve(process.cwd(), 'build/server-bundle.js')
return createBundleRenderer(fs.readFileSync(bundlePath, 'utf-8'))
}
const app = new Koa();
app.use(cookie());
if (process.env.NODE_ENV === 'development') {
// DEVELOPMENT, with hot reload
const webpack = require('webpack')
const webpackConfig = require('../config/webpack.client.dev')
const compiler = webpack(webpackConfig)
const devMiddleware = require('koa-webpack-dev-middleware')
const hotMiddleware = require('koa-webpack-hot-middleware')
app.use(convert(devMiddleware(compiler, {
publicPath: webpackConfig.output.publicPath,
stats: {
colors: true,
modules: false,
children: false,
chunks: false,
chunkModules: false
}
})))
app.use(convert(hotMiddleware(compiler)))
// server renderer
const serverBundleConfig = require('../config/webpack.bundle')
const serverBundleCompiler = webpack(serverBundleConfig)
const mfs = new MFS()
serverBundleCompiler.outputFileSystem = mfs
serverBundleCompiler.watch({}, (err, stats) => {
if (err) throw err
stats = stats.toJson()
stats.errors.forEach(err => console.error(err))
stats.warnings.forEach(err => console.warn(err))
renderer = createRenderer(mfs)
})
}
else {
// PRODUCTION
// use nginx to serve static files in real
//app.use(convert(serve({rootDir: path.join(process.cwd(), 'build'), rootPath: '/static'})))
app.use(serveStatic(path.join(process.cwd(), 'build')));
renderer = createRenderer(fs)
}
app.use(ctx => {
var start = new Date;
ctx.type = 'text/html; charset=utf-8'
const context = {url: ctx.url}
const title = 'Tripora';
const stream = new PassThrough()
console.log("Checking if server-side cookie exists...");
// See if request sent over an authentication token in their cookies
if(ctx.cookie && ctx.cookie.token) {
console.log("Found cookie token.");
context.token = ctx.cookie.token;
}
stream.write(`<!DOCTYPE html><html style="min-height: 100%;"><head><meta charset="utf-8"/><title>${title}</title>${assets.main.css ? `<link rel="stylesheet" href="${assets.main.css}"/>` : ''}</head><body style="min-height: 100%;">`)
const renderStream = renderer.renderToStream(context)
let firstChunk = true
renderStream.on('data', chunk => {
// treat only extension-less URLs as page requests (ignore requests for files)
var isPage = ctx.url.split(".").length == 1;
if (firstChunk && context.initialState && isPage) {
stream.write(`<script>window.__INITIAL_STATE__=${serialize(context.initialState, {isJSON: true})}</script>${chunk}`)
firstChunk = false
} else {
stream.write(chunk)
}
})
renderStream.on('end', () => {
stream.write(`<script src="${assets.main.js}"></script></body></html>`)
var ms = new Date - start;
//ctx.set('X-Response-Time', ms + 'ms');
console.log("-- Response time %s %s = %sms", ctx.method, ctx.originalUrl, ms);
ctx.res.end()
})
renderStream.on('error', err => {
console.log("ERROR", err.stack);
throw new Error(`something bad happened when renderToStream: ${err}`)
})
ctx.status = 200
ctx.body = stream
})
const port = process.env.NODE_PORT || 80
app.listen(port, () => {
console.log(`==> Listening at http://localhost:${port}`)
})
Anyone know why the HMR initial request would take so long, and seem to be so random (sometimes 5 seconds, sometimes 30 seconds)?
