MakeCallback DeprecationWarning error when running Foundation for Emails build process - node.js

I'm using Foundation for Emails Sass Version to generate HTML emails. I have made a few small changes to the gulpfile and package.json, but for the most part it is exactly what is given on the Foundation for Emails repo.
I'm getting an error when I try to run npm run build. It seems to be something I have added to my template code but I am not sure what it could be.
Here is the error:
[13:48:22] Using gulpfile ~/Development/Work/foundation-email-stack-sass-workflow/gulpfile.babel.js
[13:48:22] Starting 'default'...
[13:48:22] Starting 'build'...
[13:48:22] Starting 'clean'...
[13:48:22] Finished 'clean' after 11 ms
[13:48:22] Starting 'pages'...
[13:48:23] Finished 'pages' after 525 ms
[13:48:23] Starting 'sass'...
[13:48:35] Finished 'sass' after 12 s
[13:48:35] Starting 'images'...
[13:48:39] gulp-imagemin: Minified 27 images (saved 46.34 kB - 1.1%)
[13:48:39] Finished 'images' after 4.04 s
[13:48:39] Starting 'inline'...
(node:35425) [DEP0097] DeprecationWarning: Using a domain property in MakeCallback is deprecated. Use the async_context variant of MakeCallback or the AsyncResource class instead.
[13:48:41] The following tasks did not complete: default, build, inline
[13:48:41] Did you forget to signal async completion?
Here is my gulpfile:
import gulp from 'gulp';
import plugins from 'gulp-load-plugins';
import browser from 'browser-sync';
import rimraf from 'rimraf';
import panini from 'panini';
import yargs from 'yargs';
import lazypipe from 'lazypipe';
import inky from 'inky';
import fs from 'fs';
import siphon from 'siphon-media-query';
import path from 'path';
import merge from 'merge-stream';
import beep from 'beepbeep';
import colors from 'colors';
var helpers = require('handlebars-helpers')();
var ext_replace = require('gulp-ext-replace');
const $ = plugins();
// Look for the --production flag
const PRODUCTION = !!(yargs.argv.production);
const EMAIL = yargs.argv.to;
// Declare var so that both the AWS and Litmus tasks can use it.
var CONFIG;
// Build the "dist" folder by running all of the below tasks
gulp.task('build',
gulp.series(clean, pages, sass, images, inline));
// Build emails, run the server, and watch for file changes
gulp.task('default',
gulp.series('build', server, watch));
// Build emails, then send to litmus
gulp.task('litmus',
gulp.series('build', creds, aws, litmus));
// Build emails, then send to EMAIL
gulp.task('mail',
gulp.series('build', creds, aws, mail));
// Build emails, then zip
gulp.task('zip',
gulp.series('build', zip));
// Delete the "dist" folder
// This happens every time a build starts
function clean(done) {
rimraf('dist', done);
}
// Compile layouts, pages, and partials into flat HTML files
// Then parse using Inky templates
function pages() {
return gulp.src(['src/pages/**/*.{html,hbs,handlebars}', '!src/pages/archive/**/*.{html,hbs,handlebars}'])
.pipe(panini({
root: 'src/pages',
layouts: 'src/layouts',
partials: 'src/partials',
helpers: 'src/helpers'
}))
.pipe(inky())
.pipe(ext_replace('.html'))
.pipe(gulp.dest('dist'));
}
// Reset Panini's cache of layouts and partials
function resetPages(done) {
panini.refresh();
done();
}
// Compile Sass into CSS
function sass() {
return gulp.src('src/assets/scss/**/*.scss')
.pipe($.if(!PRODUCTION, $.sourcemaps.init()))
.pipe($.sass({
includePaths: ['node_modules/foundation-emails/scss']
}).on('error', $.sass.logError))
.pipe($.if(PRODUCTION, $.uncss(
{
html: ['dist/**/*.html']
})))
.pipe($.if(!PRODUCTION, $.sourcemaps.write()))
.pipe(gulp.dest('dist/css'));
}
// Copy and compress images
function images() {
return gulp.src(['src/assets/img/**/*', '!src/assets/img/archive/**/*'])
.pipe($.imagemin())
.pipe(gulp.dest('./dist/assets/img'));
}
// Inline CSS and minify HTML
function inline() {
return gulp.src('dist/**/*.html')
.pipe($.if(PRODUCTION, inliner('dist/css/app.css')))
.pipe(gulp.dest('dist'));
}
// Start a server with LiveReload to preview the site in the browser
function server(done) {
browser.init({
server: 'dist'
});
done();
}
// Watch for file changes
function watch() {
gulp.watch('src/pages/**/*.{html,hbs,handlebars}').on('all', gulp.series(pages, inline, browser.reload));
gulp.watch(['src/layouts/**/*', 'src/partials/**/*']).on('all', gulp.series(resetPages, pages, inline, browser.reload));
gulp.watch(['../scss/**/*.scss', 'src/assets/scss/**/*.scss']).on('all', gulp.series(resetPages, sass, pages, inline, browser.reload));
gulp.watch('src/assets/img/**/*').on('all', gulp.series(images, browser.reload));
}
// Inlines CSS into HTML, adds media query CSS into the <style> tag of the email, and compresses the HTML
function inliner(css) {
var css = fs.readFileSync(css).toString();
var mqCss = siphon(css);
var pipe = lazypipe()
.pipe($.inlineCss, {
applyStyleTags: false,
removeStyleTags: true,
preserveMediaQueries: true,
removeLinkTags: false
})
.pipe($.replace, '<!-- <style> -->', `<style>${mqCss}</style>`)
.pipe($.replace, '<link rel="stylesheet" type="text/css" href="css/app.css">', '')
.pipe($.htmlmin, {
collapseWhitespace: true,
minifyCSS: true
});
return pipe();
}
// Ensure creds for Litmus are at least there.
function creds(done) {
var configPath = './config.json';
try { CONFIG = JSON.parse(fs.readFileSync(configPath)); }
catch(e) {
beep();
console.log('[AWS]'.bold.red + ' Sorry, there was an issue locating your config.json. Please see README.md');
process.exit();
}
done();
}
// Post images to AWS S3 so they are accessible to Litmus and manual test
function aws() {
var publisher = !!CONFIG.aws ? $.awspublish.create(CONFIG.aws) : $.awspublish.create();
var headers = {
'Cache-Control': 'max-age=315360000, no-transform, public'
};
return gulp.src('./dist/assets/img/*')
// publisher will add Content-Length, Content-Type and headers specified above
// If not specified it will set x-amz-acl to public-read by default
.pipe(publisher.publish(headers))
// create a cache file to speed up consecutive uploads
//.pipe(publisher.cache())
// print upload updates to console
.pipe($.awspublish.reporter());
}
// Send email to Litmus for testing. If no AWS creds then do not replace img urls.
function litmus() {
var awsURL = !!CONFIG && !!CONFIG.aws && !!CONFIG.aws.url ? CONFIG.aws.url : false;
return gulp.src('dist/**/*.html')
.pipe($.if(!!awsURL, $.replace(/=('|")(\/?assets\/img)/g, "=$1"+ awsURL)))
.pipe($.litmus(CONFIG.litmus))
.pipe(gulp.dest('dist'));
}
// Send email to specified email for testing. If no AWS creds then do not replace img urls.
function mail() {
var awsURL = !!CONFIG && !!CONFIG.aws && !!CONFIG.aws.url ? CONFIG.aws.url : false;
if (EMAIL) {
CONFIG.mail.to = [EMAIL];
}
return gulp.src('dist/**/*.html')
.pipe($.if(!!awsURL, $.replace(/=('|")(\/?assets\/img)/g, "=$1"+ awsURL)))
.pipe($.mail(CONFIG.mail))
.pipe(gulp.dest('dist'));
}
// Copy and compress into Zip
function zip() {
var dist = 'dist';
var ext = '.html';
function getHtmlFiles(dir) {
return fs.readdirSync(dir)
.filter(function(file) {
var fileExt = path.join(dir, file);
var isHtml = path.extname(fileExt) == ext;
return fs.statSync(fileExt).isFile() && isHtml;
});
}
var htmlFiles = getHtmlFiles(dist);
var moveTasks = htmlFiles.map(function(file){
var sourcePath = path.join(dist, file);
var fileName = path.basename(sourcePath, ext);
var moveHTML = gulp.src(sourcePath)
.pipe($.rename(function (path) {
path.dirname = fileName;
return path;
}));
var moveImages = gulp.src(sourcePath)
.pipe($.htmlSrc({ selector: 'img'}))
.pipe($.rename(function (path) {
path.dirname = fileName + path.dirname.replace('dist', '');
return path;
}));
return merge(moveHTML, moveImages)
.pipe($.zip(fileName+ '.zip'))
.pipe(gulp.dest('dist'));
});
return merge(moveTasks);
}
And my package.json:
{
"name": "foundation-emails-template",
"version": "1.0.0",
"description": "Basic template for a Foundation for Emails project.",
"repository": "zurb/foundation-emails-template",
"main": "gulpfile.babel.js",
"scripts": {
"start": "gulp",
"build": "gulp --production",
"zip": "gulp zip --production",
"litmus": "gulp litmus --production",
"mail": "gulp mail --production"
},
"author": "ZURB <foundation#zurb.com>",
"license": "MIT",
"dependencies": {
"foundation-emails": "^2.2.1",
"handlebars-helpers": "^0.10.0"
},
"devDependencies": {
"babel-core": "^6.3.26",
"babel-preset-es2015": "^6.3.13",
"babel-register": "^6.7.2",
"beepbeep": "^1.2.0",
"browser-sync": "^2.11.0",
"colors": "^1.1.2",
"gulp": ">=4.0",
"gulp-awspublish": "^3.0.1",
"gulp-cli": "^1.1.0",
"gulp-ext-replace": "^0.3.0",
"gulp-html-src": "^1.0.0",
"gulp-htmlmin": "^1.1.1",
"gulp-if": "^2.0.0",
"gulp-imagemin": "^2.4.0",
"gulp-inline-css": "^3.0.0",
"gulp-litmus": "0.0.7",
"gulp-load-plugins": "^1.1.0",
"gulp-mail": "^0.1.1",
"gulp-rename": "^1.2.2",
"gulp-replace": "^0.5.4",
"gulp-sass": "^2.1.0",
"gulp-sourcemaps": "^1.6.0",
"gulp-uncss": "^1.0.1",
"gulp-zip": "^3.2.0",
"inky": "^1.3.6",
"lazypipe": "^1.0.1",
"merge-stream": "^1.0.0",
"panini": "^1.3.0",
"rimraf": "^2.3.3",
"siphon-media-query": "^1.0.0",
"yargs": "^4.1.0"
},
"babel": {
"presets": [
"es2015"
]
}
}
Suggestions?

So, I had added a custom helper:
module.exports = function( content ) {
var devmode = content;
if( devmode === true ) {
return "/";
} else {
return "http:*****";
}
}
which used a value in each page's front matter to change URLs:
---
devmode: true
---
or:
---
devmode: false
---
The helper inserts the resulting value into pages via:
{{#remoteurl devmode}}{{/remoteurl}}
The build process did not like that I was passing an unquoted true/false value: YAML parses unquoted true/false as a boolean rather than a string, and the failure this caused during the inline task appears to be why gulp reported that the tasks never completed. Quoting the devmode value in the front matter fixed the problem:
---
devmode: "true"
---
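A more defensive version of the helper (just a sketch, assuming the front-matter value may arrive either as a boolean or as a string) tolerates both forms:
module.exports = function(content) {
  // Accept both the boolean true (unquoted YAML) and the string "true"
  // (quoted YAML), so the front matter works either way.
  var devmode = content === true || content === "true";
  if (devmode) {
    return "/";
  } else {
    return "http:*****";
  }
};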

Related

Babel error: Unknown option: .config when trying to upgrade dependencies of Inferno app

I have an app which is created with Create Inferno App. I ejected at some point to customize build configs. Now I am trying to upgrade dependencies to the latest versions and replace deprecated ones. When trying to build my app, I get this error:
yarn run v1.22.19
$ INLINE_RUNTIME_CHUNK=false node scripts/devbuild.js
Creating a development build...
Failed to compile.
Error: [BABEL] /home/farooqkz/playground/chooj/src/index.js: Unknown option: .config. Check out https://babeljs.io/docs/en/babel-core/#options for more information about options.
error Command failed with exit code 1.
info Visit https://yarnpkg.com/en/docs/cli/run for documentation about this command.
Unfortunately, the error message doesn't say where it comes from. Here's my package.json:
{
"name": "chooj",
"version": "0.0.0",
"private": true,
"dependencies": {
"#svgr/webpack": "^6.5.1",
"KaiUI": "git+https://github.com/farooqkz/KaiUIv2.git",
"#babel/core": "^7.20.2",
"#babel/eslint-parser": "^7.19.0",
"babel-loader": "8.0.4",
"babel-preset-inferno-app": "^8.0.3",
"bfj": "^7.0.0",
"case-sensitive-paths-webpack-plugin": "2.4.0",
"chalk": "2.4.1",
"classnames": "^2.3.1",
"core-js": "^3.26.1",
"css-loader": "^6.7.0",
"dotenv": "16.0.3",
"dotenv-expand": "9.0.0",
"eslint": "^8.27.0",
"eslint-config-inferno-app": "^7.0.2",
"eslint-webpack-plugin": "3.2.0",
"eslint-plugin-flowtype": "^8.0.0",
"eslint-plugin-import": "2.14.0",
"eslint-plugin-inferno": "^7.11.0",
"eslint-plugin-jsx-a11y": "6.1.2",
"file-loader": "^6.2.0",
"fs-extra": "7.0.1",
"html-webpack-plugin": "5.5.0",
"identity-obj-proxy": "3.0.0",
"inferno": "^8.0.0",
"inferno-dev-utils": "^6.0.4",
"inferno-extras": "^8.0.0",
"jsqr": "^1.4.0",
"localforage": "^1.9.0",
"matrix-js-sdk": "^21.1.0",
"mini-css-extract-plugin": "^2.6.0",
"css-minimizer-webpack-plugin": "4.2.2",
"pnp-webpack-plugin": "1.7.0",
"postcss-flexbugs-fixes": "4.1.0",
"postcss-loader": "^7.0.0",
"postcss-preset-env": "^7.8.0",
"postcss-safe-parser": "^6.0.0",
"prettier": "^2.3.2",
"querystring-browser": "^1.0.4",
"react-app-polyfill": "^3.0.0",
"resolve": "^1.22.0",
"sass-loader": "^13.0.0",
"style-loader": "^3.3.0",
"terser-webpack-plugin": "5.3.6",
"url-loader": "1.1.2",
"webpack": "^5.75.0",
"webpack-dev-server": "^4.11.1"
},
"scripts": {
"start": "node scripts/start.js",
"devbuild": "INLINE_RUNTIME_CHUNK=false node scripts/devbuild.js"
},
"eslintConfig": {
"extends": "inferno-app"
},
"browserslist": "ff 48",
"babel": {
"presets": [
"inferno-app"
]
},
"devDependencies": {
"babel-plugin-transform-replace-expressions": "^0.2.0"
}
}
And here's the webpack config I use:
'use strict';
const path = require('path');
const webpack = require('webpack');
const PnpWebpackPlugin = require('pnp-webpack-plugin');
const HtmlWebpackPlugin = require('html-webpack-plugin');
const CaseSensitivePathsPlugin = require('case-sensitive-paths-webpack-plugin');
const ESLintPlugin = require('eslint-webpack-plugin');
const InterpolateHtmlPlugin = require('inferno-dev-utils/InterpolateHtmlPlugin');
const WatchMissingNodeModulesPlugin = require('inferno-dev-utils/WatchMissingNodeModulesPlugin');
const ModuleScopePlugin = require('inferno-dev-utils/ModuleScopePlugin');
const getCSSModuleLocalIdent = require('inferno-dev-utils/getCSSModuleLocalIdent');
const getClientEnvironment = require('./env');
const paths = require('./paths');
const ModuleNotFoundPlugin = require('inferno-dev-utils/ModuleNotFoundPlugin');
// Webpack uses `publicPath` to determine where the app is being served from.
// In development, we always serve from the root. This makes config easier.
const publicPath = '/';
// `publicUrl` is just like `publicPath`, but we will provide it to our app
// as %PUBLIC_URL% in `index.html` and `process.env.PUBLIC_URL` in JavaScript.
// Omit trailing slash as %PUBLIC_PATH%/xyz looks better than %PUBLIC_PATH%xyz.
const publicUrl = '';
// Get environment variables to inject into our app.
const env = getClientEnvironment(publicUrl);
// style files regexes
const cssRegex = /\.css$/;
const cssModuleRegex = /\.module\.css$/;
const sassRegex = /\.(scss|sass)$/;
const sassModuleRegex = /\.module\.(scss|sass)$/;
// common function to get style loaders
const getStyleLoaders = (cssOptions, preProcessor) => {
const loaders = [
require.resolve('style-loader'),
{
loader: require.resolve('css-loader'),
options: cssOptions,
},
{
// Options for PostCSS as we reference these options twice
// Adds vendor prefixing based on your specified browser support in
// package.json
loader: require.resolve('postcss-loader'),
options: {
// Necessary for external CSS imports to work
// https://github.com/facebook/create-react-app/issues/2677
ident: 'postcss',
plugins: () => [
require('postcss-flexbugs-fixes'),
require('postcss-preset-env')({
autoprefixer: {
flexbox: 'no-2009',
},
stage: 3,
}),
],
},
},
];
if (preProcessor) {
loaders.push(require.resolve(preProcessor));
}
return loaders;
};
// This is the development configuration.
// It is focused on developer experience and fast rebuilds.
// The production configuration is different and lives in a separate file.
module.exports = {
mode: 'development',
// These are the "entry points" to our application.
// This means they will be the "root" imports that are included in JS bundle.
entry: [
// Include an alternative client for WebpackDevServer. A client's job is to
// connect to WebpackDevServer by a socket and get notified about changes.
// When you save a file, the client will either apply hot updates (in case
// of CSS changes), or refresh the page (in case of JS changes). When you
// make a syntax error, this client will display a syntax error overlay.
// Note: instead of the default WebpackDevServer client, we use a custom one
// to bring better experience for Create Inferno App users. You can replace
// the line below with these two lines if you prefer the stock client:
// require.resolve('webpack-dev-server/client') + '?/',
// require.resolve('webpack/hot/dev-server'),
//require.resolve('inferno-dev-utils/webpackHotDevClient'),
// Finally, this is your app's code:
paths.appIndexJs,
// We include the app code last so that if there is a runtime error during
// initialization, it doesn't blow up the WebpackDevServer client, and
// changing JS code would still trigger a refresh.
],
output: {
// Add /* filename */ comments to generated require()s in the output.
path: paths.appBuild,
pathinfo: true,
// This does not produce a real file. It's just the virtual path that is
// served by WebpackDevServer in development. This is the JS bundle
// containing code from all our entry points, and the Webpack runtime.
filename: 'static/js/[name].[fullhash:6].js',
// There are also additional JS chunk files if you use code splitting.
chunkFilename: 'static/js/[id].[chunkhash].js',
// This is the URL that app is served from. We use "/" in development.
publicPath: publicPath,
// Point sourcemap entries to original disk location (format as URL on Windows)
devtoolModuleFilenameTemplate: info =>
path.resolve(info.absoluteResourcePath).replace(/\\/g, '/'),
},
optimization: {
// Automatically split vendor and commons
// https://twitter.com/wSokra/status/969633336732905474
// https://medium.com/webpack/webpack-4-code-splitting-chunk-graph-and-the-splitchunks-optimization-be739a861366
splitChunks: {
chunks: 'all',
name: false,
},
runtimeChunk: false,
},
resolve: {
// This allows you to set a fallback for where Webpack should look for modules.
// We placed these paths second because we want `node_modules` to "win"
// if there are any conflicts. This matches Node resolution mechanism.
// https://github.com/facebook/create-react-app/issues/253
modules: ['node_modules'].concat(
// It is guaranteed to exist because we tweak it in `env.js`
process.env.NODE_PATH.split(path.delimiter).filter(Boolean)
),
// These are the reasonable defaults supported by the Node ecosystem.
// We also include JSX as a common component filename extension to support
// some tools, although we do not recommend using it, see:
// https://github.com/facebook/create-react-app/issues/290
// `web` extension prefixes have been added for better support
// for React Native Web.
extensions: ['.mjs', '.web.js', '.js', '.json', '.web.jsx', '.jsx'],
alias: {
react: 'inferno-compat',
'react-dom': 'inferno-compat',
// These aliases make sure all inferno imports and react aliases resolve to the same script entry and no duplicates are made
inferno: path.resolve(require.resolve('inferno/dist/index.dev.esm.js')),
// 'inferno-clone-vnode': path.resolve(require.resolve('inferno-clone-vnode/dist/index.dev.esm.js')),
// 'inferno-compat': path.resolve(require.resolve('inferno-compat/dist/index.dev.esm.js')),
// 'inferno-component': path.resolve(require.resolve('inferno-component/dist/index.dev.esm.js')),
// 'inferno-create-class': path.resolve(require.resolve('inferno-create-class/dist/index.dev.esm.js')),
// 'inferno-create-element': path.resolve(require.resolve('inferno-create-element/dist/index.dev.esm.js')),
// 'inferno-devtools': path.resolve(require.resolve('inferno-devtools/dist/index.dev.esm.js')),
// 'inferno-extras': path.resolve(require.resolve('inferno-extras/dist/index.dev.esm.js')),
// 'inferno-hydrate': path.resolve(require.resolve('inferno-hydrate/dist/index.dev.esm.js')),
// 'inferno-hyperscript': path.resolve(require.resolve('inferno-hyperscript/dist/index.dev.esm.js')),
// 'inferno-mobx': path.resolve(require.resolve('inferno-mobx/dist/index.dev.esm.js')),
// 'inferno-redux': path.resolve(require.resolve('inferno-redux/dist/index.dev.esm.js')),
// 'inferno-router': path.resolve(require.resolve('inferno-router/dist/index.dev.esm.js')),
// 'inferno-server': path.resolve(require.resolve('inferno-server/dist/index.dev.esm.js')),
// 'inferno-test-utils': path.resolve(require.resolve('inferno-test-utils/dist/index.dev.esm.js')),
// 'inferno-vnode-flags': path.resolve(require.resolve('inferno-vnode-flags/dist/index.dev.esm.js'))
},
plugins: [
// Adds support for installing with Plug'n'Play, leading to faster installs and adding
// guards against forgotten dependencies and such.
//PnpWebpackPlugin,
// Prevents users from importing files from outside of src/ (or node_modules/).
// This often causes confusion because we only process files within src/ with babel.
// To fix this, we prevent you from importing files out of src/ -- if you'd like to,
// please link the files into your node_modules/ and let module-resolution kick in.
// Make sure your source files are compiled, as they will not be processed in any way.
new ModuleScopePlugin(paths.appSrc, [paths.appPackageJson]),
],
},
resolveLoader: {
modules: ['node_modules'],
extensions: ['.js', '.json'],
mainFields: ['loader', 'main'],
plugins: [
// Also related to Plug'n'Play, but this time it tells Webpack to load its loaders
// from the current package.
PnpWebpackPlugin.moduleLoader(module),
],
},
module: {
strictExportPresence: true,
rules: [
// Disable require.ensure as it's not a standard language feature.
{ parser: { requireEnsure: false } },
{
// "oneOf" will traverse all following loaders until one will
// match the requirements. When no loader matches it will fall
// back to the "file" loader at the end of the loader list.
oneOf: [
// "url" loader works like "file" loader except that it embeds assets
// smaller than specified limit in bytes as data URLs to avoid requests.
// A missing `test` is equivalent to a match.
{
test: [/\.bmp$/, /\.gif$/, /\.jpe?g$/, /\.png$/],
loader: require.resolve('url-loader'),
options: {
limit: 10000,
name: 'static/media/[name].[hash:8].[ext]',
},
},
// Process application JS with Babel.
// The preset includes JSX, Flow, and some ESnext features.
{
test: /\.(js|mjs|jsx)$/,
include: paths.appSrc,
loader: require.resolve('babel-loader'),
options: {
presets: [
'babel-preset-inferno-app/webpack-overrides'
],
// This is a feature of `babel-loader` for webpack (not Babel itself).
// It enables caching results in ./node_modules/.cache/babel-loader/
// directory for faster rebuilds.
cacheDirectory: true,
// Don't waste time on Gzipping the cache
cacheCompression: false,
},
},
// Process any JS outside of the app with Babel.
// Unlike the application JS, we only compile the standard ES features.
{
test: /\.(js|mjs)$/,
exclude: /@babel(?:\/|\\{1,2})runtime/,
include: /node_modules\/.+\.js$/,
loader: require.resolve('babel-loader'),
options: {
babelrc: false,
configFile: false,
compact: false,
presets: [
[
'babel-preset-inferno-app/dependencies',
{ helpers: true },
],
],
cacheDirectory: true,
// Don't waste time on Gzipping the cache
cacheCompression: false,
// If an error happens in a package, it's possible to be
// because it was compiled. Thus, we don't want the browser
// debugger to show the original code. Instead, the code
// being evaluated would be much more helpful.
sourceMaps: false,
},
},
// "postcss" loader applies autoprefixer to our CSS.
// "css" loader resolves paths in CSS and adds assets as dependencies.
// "style" loader turns CSS into JS modules that inject <style> tags.
// In production, we use a plugin to extract that CSS to a file, but
// in development "style" loader enables hot editing of CSS.
// By default we support CSS Modules with the extension .module.css
{
test: cssRegex,
exclude: cssModuleRegex,
use: getStyleLoaders({
importLoaders: 1,
}),
},
// Adds support for CSS Modules (https://github.com/css-modules/css-modules)
// using the extension .module.css
{
test: cssModuleRegex,
use: getStyleLoaders({
importLoaders: 1,
modules: true,
getLocalIdent: getCSSModuleLocalIdent,
}),
},
// Opt-in support for SASS (using .scss or .sass extensions).
// Chains the sass-loader with the css-loader and the style-loader
// to immediately apply all styles to the DOM.
// By default we support SASS Modules with the
// extensions .module.scss or .module.sass
{
test: sassRegex,
exclude: sassModuleRegex,
use: getStyleLoaders({ importLoaders: 2 }, 'sass-loader'),
},
// Adds support for CSS Modules, but using SASS
// using the extension .module.scss or .module.sass
{
test: sassModuleRegex,
use: getStyleLoaders(
{
importLoaders: 2,
modules: true,
getLocalIdent: getCSSModuleLocalIdent,
},
'sass-loader'
),
},
// "file" loader makes sure those assets get served by WebpackDevServer.
// When you `import` an asset, you get its (virtual) filename.
// In production, they would get copied to the `build` folder.
// This loader doesn't use a "test" so it will catch all modules
// that fall through the other loaders.
{
// Exclude `js` files to keep "css" loader working as it injects
// its runtime that would otherwise be processed through "file" loader.
// Also exclude `html` and `json` extensions so they get processed
// by webpacks internal loaders.
exclude: [/\.(js|mjs|jsx)$/, /\.html$/, /\.json$/],
loader: require.resolve('file-loader'),
options: {
name: 'static/media/[name].[hash:8].[ext]',
},
},
],
},
// ** STOP ** Are you adding a new loader?
// Make sure to add the new loader(s) before the "file" loader.
],
},
plugins: [
// Generates an `index.html` file with the <script> injected.
new HtmlWebpackPlugin({
inject: true,
template: paths.appHtml,
}),
// Makes some environment variables available in index.html.
// The public URL is available as %PUBLIC_URL% in index.html, e.g.:
// <link rel="shortcut icon" href="%PUBLIC_URL%/favicon.ico">
// In development, this will be an empty string.
new InterpolateHtmlPlugin(HtmlWebpackPlugin, env.raw),
// This gives some necessary context to module not found errors, such as
// the requesting resource.
new ModuleNotFoundPlugin(paths.appPath),
// Makes some environment variables available to the JS code, for example:
// if (process.env.NODE_ENV === 'development') { ... }. See `./env.js`.
new webpack.DefinePlugin(env.stringified),
// This is necessary to emit hot updates (currently CSS only):
//new webpack.HotModuleReplacementPlugin(),
// Watcher doesn't work well if you mistype casing in a path so we use
// a plugin that prints an error when you attempt to do this.
// See https://github.com/facebook/create-react-app/issues/240
new CaseSensitivePathsPlugin(),
// If you require a missing module and then `npm install` it, you still have
// to restart the development server for Webpack to discover it. This plugin
// makes the discovery automatic so you don't have to restart.
// See https://github.com/facebook/create-react-app/issues/186
new WatchMissingNodeModulesPlugin(paths.appNodeModules),
// Moment.js is an extremely popular library that bundles large locale files
// by default due to how Webpack interprets its code. This is a practical
// solution that requires the user to opt into importing specific locales.
// https://github.com/jmblog/how-to-optimize-momentjs-with-webpack
new ESLintPlugin(),
],
// Turn off performance processing because we utilize
// our own hints via the FileSizeReporter
performance: false,
};
And finally, the build script itself:
'use strict';
// Do this as the first thing so that any code reading it knows the right env.
process.env.BABEL_ENV = 'development';
process.env.NODE_ENV = 'development';
// Ensure environment variables are read.
require('../config/env');
const path = require('path');
const chalk = require('chalk');
const fs = require('fs-extra');
const webpack = require('webpack');
const bfj = require('bfj');
const config = require('../config/webpack.config.devbuild');
const paths = require('../config/paths');
const checkRequiredFiles = require('inferno-dev-utils/checkRequiredFiles');
const formatWebpackMessages = require('inferno-dev-utils/formatWebpackMessages');
const printHostingInstructions = require('inferno-dev-utils/printHostingInstructions');
const FileSizeReporter = require('inferno-dev-utils/FileSizeReporter');
const printBuildError = require('inferno-dev-utils/printBuildError');
const measureFileSizesBeforeBuild =
FileSizeReporter.measureFileSizesBeforeBuild;
const printFileSizesAfterBuild = FileSizeReporter.printFileSizesAfterBuild;
const useYarn = fs.existsSync(paths.yarnLockFile);
// These sizes are pretty large. We'll warn for bundles exceeding them.
const WARN_AFTER_BUNDLE_GZIP_SIZE = 512 * 1024;
const WARN_AFTER_CHUNK_GZIP_SIZE = 1024 * 1024;
const isInteractive = process.stdout.isTTY;
// Warn and crash if required files are missing
if (!checkRequiredFiles([paths.appHtml, paths.appIndexJs])) {
process.exit(1);
}
// Process CLI arguments
const argv = process.argv.slice(2);
const writeStatsJson = argv.indexOf('--stats') !== -1;
// We require that you explicitly set browsers and do not fall back to
// browserslist defaults.
const { checkBrowsers } = require('inferno-dev-utils/browsersHelper');
checkBrowsers(paths.appPath, isInteractive)
.then(() => {
// First, read the current file sizes in build directory.
// This lets us display how much they changed later.
return measureFileSizesBeforeBuild(paths.appBuild);
})
.then(previousFileSizes => {
// Remove all content but keep the directory so that
// if you're in it, you don't end up in Trash
fs.emptyDirSync(paths.appBuild);
// Merge with the public folder
copyPublicFolder();
// Start the webpack build
return build(previousFileSizes);
})
.then(
({ stats, previousFileSizes, warnings }) => {
if (warnings.length) {
console.log(chalk.yellow('Compiled with warnings.\n'));
console.log(warnings.join('\n\n'));
console.log(
'\nSearch for the ' +
chalk.underline(chalk.yellow('keywords')) +
' to learn more about each warning.'
);
console.log(
'To ignore, add ' +
chalk.cyan('// eslint-disable-next-line') +
' to the line before.\n'
);
} else {
console.log(chalk.green('Compiled successfully.\n'));
}
console.log('File sizes after gzip:\n');
printFileSizesAfterBuild(
stats,
previousFileSizes,
paths.appBuild,
WARN_AFTER_BUNDLE_GZIP_SIZE,
WARN_AFTER_CHUNK_GZIP_SIZE
);
console.log();
const appPackage = require(paths.appPackageJson);
const publicUrl = paths.publicUrl;
const publicPath = config.output.publicPath;
const buildFolder = path.relative(process.cwd(), paths.appBuild);
printHostingInstructions(
appPackage,
publicUrl,
publicPath,
buildFolder,
useYarn
);
},
err => {
console.log(chalk.red('Failed to compile.\n'));
printBuildError(err);
process.exit(1);
}
)
.catch(err => {
if (err && err.message) {
console.log(err.message);
}
process.exit(1);
}).then(() => {
let jsFile = paths.appBuild + '/static/js/0.chunk.js';
fs.readFile(jsFile).then(buf => {
let s = buf.toString();
s = s.replace('new XHR()', 'new XHR({mozSystem:true})');
s = s.replace('new global.XMLHttpRequest()', 'new global.XMLHttpRequest({mozSystem:true})');
// These two "replace"s are necessary to make WebRTC fully work on KaiOS pre-3.0
s = s.replace(/setRemoteDescription\(([a-zA-Z0-9\.]+)\)/g, (match, p1) => {
return `setRemoteDescription(new RTCSessionDescription(${p1}))`;
});
s = s.replace(/addIceCandidate\(([a-zA-Z0-9\.]+)\)/g, (match, p1) => {
return `addIceCandidate(new RTCIceCandidate(${p1}))`;
});
s = s.replace("!(!remoteStream", "(!remoteStream");
fs.writeFile(jsFile, s).catch(err => {
if (err && err.message) {
console.log(err.message);
}
});
});
});
// Create the production build and print the deployment instructions.
function build(previousFileSizes) {
console.log('Creating a development build...');
let compiler = webpack(config);
return new Promise((resolve, reject) => {
compiler.run((err, stats) => {
let messages;
if (err) {
if (!err.message) {
return reject(err);
}
messages = formatWebpackMessages({
errors: [err.message],
warnings: [],
});
} else {
messages = formatWebpackMessages(
stats.toJson({ all: false, warnings: true, errors: true })
);
}
if (messages.errors.length) {
// Only keep the first error. Others are often indicative
// of the same problem, but confuse the reader with noise.
if (messages.errors.length > 1) {
messages.errors.length = 1;
}
return reject(new Error(messages.errors.join('\n\n')));
}
if (
process.env.CI &&
(typeof process.env.CI !== 'string' ||
process.env.CI.toLowerCase() !== 'false') &&
messages.warnings.length
) {
console.log(
chalk.yellow(
'\nTreating warnings as errors because process.env.CI = true.\n' +
'Most CI servers set it automatically.\n'
)
);
return reject(new Error(messages.warnings.join('\n\n')));
}
const resolveArgs = {
stats,
previousFileSizes,
warnings: messages.warnings,
};
if (writeStatsJson) {
return bfj
.write(paths.appBuild + '/bundle-stats.json', stats.toJson())
.then(() => resolve(resolveArgs))
.catch(error => reject(new Error(error)));
}
return resolve(resolveArgs);
});
});
}
function copyPublicFolder() {
fs.copySync(paths.appPublic, paths.appBuild, {
dereference: true,
filter: file => file !== paths.appHtml,
});
}

Error while deploying a smart contract to Mumbai testnet through Hardhat

I've been having this problem while trying to deploy a smart contract to the Mumbai testnet using Hardhat, and I keep getting the following error:
Error HH9: Error while loading Hardhat's configuration.
You probably tried to import the "hardhat" module from your config or a file imported from it.
This is not possible, as Hardhat can't be initialized while its config is being defined.
To learn more about how to access the Hardhat Runtime Environment from different contexts go to https://hardhat.org/hre
Here's my smart contract code:
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.1;
// implements the ERC721 standard
import "#openzeppelin/contracts/token/ERC721/ERC721.sol";
// keeps track of the number of tokens issued
import "#openzeppelin/contracts/utils/Counters.sol";
import "#openzeppelin/contracts/access/Ownable.sol";
// Here we need to get the contract object sent from the frontend React app and replace the properties of the contract hereunder
// Accessing the Ownable method ensures that only the creator of the smart contract can interact with it
contract myContract is ERC721, Ownable {
using Counters for Counters.Counter;
Counters.Counter private currentTokenId;
/// @dev Base token URI used as a prefix by tokenURI().
string public baseTokenURI;
constructor() ERC721("MyToken", "MTK") {
baseTokenURI = "";
}
function mintTo(address recipient) public returns (uint256) {
currentTokenId.increment();
uint256 newItemId = currentTokenId.current();
_safeMint(recipient, newItemId);
return newItemId;
}
/// @dev Returns a URI for a given token ID
function _baseURI() internal view virtual override returns (string memory) {
return baseTokenURI;
}
/// @dev Sets the base token URI prefix.
function setBaseTokenURI(string memory _baseTokenURI) public {
baseTokenURI = _baseTokenURI;
}
}
Here's the deploy script:
const { ethers } = require("hardhat");
async function main() {
// Fetching the compiled contract using ethers.js
const contract = await ethers.getContractFactory("myContract");
// calling deploy() will return an async Promise that we can await on
const CustomSC = await contract.deploy();
console.log(`Contract deployed to address: ${CustomSC.address}`);
}
main()
.then(() => process.exit(0))
.catch((error) => {
console.error(error);
process.exit(1);
});
And here's my hardhat.config file:
/**
* @type import('hardhat/config').HardhatUserConfig
*/
require('dotenv').config();
require("#nomiclabs/hardhat-ethers");
require("#nomiclabs/hardhat-waffle");
require("./scripts/deploy.js");
require("#nomiclabs/hardhat-etherscan");
const { MORALIS_POLYGON_KEY, POLYGONSCAN_API_KEY, ACCOUNT_PRIVATE_KEY } = process.env;
module.exports = {
solidity: "0.8.1",
defaultNetwork: "mumbai",
networks: {
hardhat: {},
mumbai: {
url: `${MORALIS_POLYGON_KEY}`,
accounts: [`0x${ACCOUNT_PRIVATE_KEY}`],
},
},
etherscan: {
apiKey: POLYGONSCAN_API_KEY,
},
};
And here's my package.json file:
{
"name": "backend",
"version": "1.0.0",
"description": "backend for the NFT Marketplace dApp",
"main": "src/server.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"start": "nodemon src/server.js",
"build": "node src/server.js"
},
"author": "Ayed Oukhay",
"license": "ISC",
"dependencies": {
"#openzeppelin/contracts": "^4.0.0",
"body-parser": "^1.20.0",
"cors": "^2.8.5",
"dotenv": "^16.0.0",
"express": "^4.18.1",
"helmet": "^5.0.2",
"mongodb": "^4.5.0",
"mongoose": "^6.3.2",
"nodemon": "^2.0.16",
"web3": "^1.7.3"
},
"devDependencies": {
"#nomiclabs/hardhat-ethers": "^2.0.6",
"#nomiclabs/hardhat-etherscan": "^3.0.3",
"#nomiclabs/hardhat-waffle": "^2.0.3",
"chai": "^4.3.6",
"ethereum-waffle": "^3.4.4",
"ethers": "^5.6.6",
"hardhat": "^2.9.5"
}
}
When I tried fixing it by replacing the following line:
const { ethers } = require("hardhat");
with:
const { ethers } = require("hardhat/config");
I get the following error:
TypeError: Cannot read property 'getContractFactory' of undefined
And even when I replaced the deploy.js code with one based on tasks and helpers, it compiles successfully, but npx hardhat run scripts/deploy.js --network mumbai doesn't return anything.
here's the code that I replaced it with:
deploy.js
const { task } = require("hardhat/config");
const { getAccount } = require("./helpers.js");
task("deploy", "Deploys the smart contract ...").setAction(async function (taskArguments, hre) {
const myContractFactory = await hre.ethers.getContractFactory("myContract", getAccount());
console.log('Deploying myContract...');
const contract = await myContractFactory.deploy();
await contract.deployed();
console.log(`Contract deployed to address: ${contract.address}`);
});
and helpers.js
const { ethers } = require("ethers");
const { getContractAt } = require("@nomiclabs/hardhat-ethers/internal/helpers");
// Helper method for fetching environment variables from .env
function getEnvVariable(key, defaultValue) {
if (process.env[key]) {
return process.env[key];
}
if (!defaultValue) {
throw `${key} is not defined and no default value was provided`;
}
return defaultValue;
}
// Helper method for fetching a connection provider to the Ethereum network
function getProvider() {
return ethers.getDefaultProvider(getEnvVariable("NETWORK", "mumbai"), {
moralis: getEnvVariable("MORALIS_POLYGON_KEY"),
});
}
// Note: the getProvider() helper also lets us use other EVM networks (like Ethereum mainnet or Polygon) by optionally setting a NETWORK environment variable in .env.
// Helper method for fetching a wallet account using an environment variable for the PK
function getAccount() {
return new ethers.Wallet(getEnvVariable("ACCOUNT_PRIVATE_KEY"), getProvider());
}
// Helper method for fetching a contract instance at a given address
function getContract(contractName, hre) {
const account = getAccount();
return getContractAt(hre, contractName, getEnvVariable("NFT_CONTRACT_ADDRESS"), account);
}
module.exports = {
getEnvVariable,
getProvider,
getAccount,
getContract,
}
Any help would be really appreciated, I've been stuck on this for almost a week now.
Ironically, I found the solution just as I posted this. Well, here it is for anyone facing the same problem: in the hardhat.config file, remove the '${}' template literals from both the network url and accounts. That solved it for me. Can't believe it took me so long to figure it out, lol.
So your config file should look like this:
/**
* @type import('hardhat/config').HardhatUserConfig
*/
require('dotenv').config();
require("#nomiclabs/hardhat-ethers");
require("#nomiclabs/hardhat-waffle");
// require("./scripts/deploy.js");
require("#nomiclabs/hardhat-etherscan");
const { MORALIS_POLYGON_KEY, POLYGONSCAN_API_KEY, ACCOUNT_PRIVATE_KEY } = process.env;
module.exports = {
solidity: "0.8.1",
defaultNetwork: "mumbai",
networks: {
hardhat: {},
mumbai: {
url: MORALIS_POLYGON_KEY,
accounts: [ACCOUNT_PRIVATE_KEY],
},
},
etherscan: {
apiKey: POLYGONSCAN_API_KEY,
},
};
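(Worth noting: besides dropping the template literals, the working config above also comments out require("./scripts/deploy.js"). Since deploy.js requires "hardhat" at the top, loading it from the config is exactly the situation the HH9 message warns about, so removing that require is likely part of what fixed the error.)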

Why am I encountering an error when deploying a nodejs function in gcloud with a zip or directly with editor?

I want to build a Cloud Function, and I'm developing it in VS Code. I think I'm using everything necessary for the function.
To test it I installed @google-cloud/storage, and it works perfectly on my machine. However, when I compress the project into a zip and import it into GCP, deployment gives me an error:
(Build failed: function.js does not exist; Error ID: 7485c5b6)
Yet I clearly set the entry point in GCP to backup, the function my code exports.
Here is the code I'm trying to run - something must be missing but I can't figure it out.
package.json:
{
"name": "export-mysql",
"version": "1.0.0",
"description": "create backup database production",
"main": "index.js",
"scripts": {
"backup": "functions-framework --target=backup"
},
"author": "",
"license": "ISC",
"dependencies": {
"chalk": "^4.1.2",
"dayjs": "^1.10.7",
"dotenv": "^10.0.0",
"googleapis": "^92.0.0",
"#google-cloud/functions-framework": "^2.1.0"
}
}
code:
const { google } = require("googleapis");
const sqlAdmin = google.sqladmin("v1beta4");
const dayjs = require("dayjs");
const chalk = require("chalk");
const dotenv = require("dotenv");
const log = console.log;
const error = console.error;
dotenv.config({ path: "./config/.env" });
let respo = "";
authorize(function (authClient) {
const date = dayjs(Date.now()).format("YYYYMMDDHHmm");
var request = {
project: "project",
instance: "database-prod",
resource: {
exportContext: {
databases: ["database"],
fileType: "SQL",
kind: "sql#exportContext",
uri: `gs://backup-database-pop/backup-prod-${date}.gz`,
},
},
auth: authClient,
};
sqlAdmin.instances.export(request, function (err, response) {
if (err) {
error(chalk.yellow.bold(`Status: ${err.code}`));
log(chalk.red.bold(`Message: ${err.message}`));
return;
}
// TODO: Change code below to process the `response` object:
// log(chalk.yellow.bold(`Status: ${response.status}`));
log(chalk.greenBright.bold(`Database exported to bucket -> backup-database-pop file: backup-prod-${date}.sql`));
respo = `Database exported to bucket -> backup-database-pop file: backup-prod-${date}.sql`;
return respo;
// log.log(JSON.stringify(response, null, 2));
});
});
function authorize(callback) {
google.auth
.getClient({
scopes: ["https://www.googleapis.com/auth/cloud-platform"],
})
.then((client) => {
callback(client);
})
.catch((err) => {
error(chalk.red.bold("authentication failed: ", err));
});
}
exports.backup = (req, res) => {
res.end();
log(respo);
log("Function complete!");
};
And here is the structure of the folder that is zipped:
functionFolder
config/.env
index.js
package.json
package-lock.json
authorize.json
Here is the solution: select the files and compress them, rather than compressing the folder itself, so that index.js and package.json sit at the root of the archive.
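For example, from a shell (assuming the project lives in functionFolder):
cd functionFolder
zip -r ../function.zip .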

prerender Angular 5 not rendering routes

Trying to develop a prerendered app following this tutorial
I run a build script that throws no errors, creates a dist-prerender folder with an index file, and places individual subfolders for the app routes with their HTML.
But what I'm finding is that the rendered subfolders contain exactly the same HTML as the index file, with nothing corresponding to their routes.
In my app.module:
imports: [
BrowserModule.withServerTransition(
{ appId: 'maleon' })
My .angular.cli.json
"name": "prerender",
"platform": "server",
"root": "src",
"outDir": "dist-prerender",
"main": "main.prerender.ts",
"tsconfig": "tsconfig.prerender.json",
"environmentSource": "environments/environment.ts",
"environments": {
"dev": "environments/environment.ts",
"prod": "environments/environment.prod.ts"
}
}
In my package.json
"build": "ng build --prod && ng build --prod --app prerender --output-hashing=none",
"postbuild": "npm run render",
"render": "ts-node prerender.ts"
In prerender.ts
import 'zone.js/dist/zone-node';
import * as path from 'path';
import * as fs from 'fs';
import { enableProdMode } from '@angular/core';
import { renderModuleFactory } from '@angular/platform-server';
import { AppPrerenderModuleNgFactory } from './dist-prerender/main.bundle';
const distFolder = './dist';
const index = fs
.readFileSync(path.resolve(__dirname, `${distFolder}/index.html`), 'utf8')
.toString();
// we could automate this based on the app.routes.ts file but
// to keep it simple let's just create an array with the routes we want
// to prerender
const paths = [
'/about',
'/accountancy',
'/consultancy',
'/taxation',
'/legals',
'/privacy-policy'];
enableProdMode();
// for every route render the html and save it in the correct folder
paths.forEach(p => renderToHtml(p, distFolder + p));
// don't forget to overwrite the index.html as well
renderToHtml('/index.html', distFolder);
function renderToHtml(url: string, folderPath: string): void {
// Render the module with the correct url just
// as the server would do
console.log(folderPath);
renderModuleFactory(AppPrerenderModuleNgFactory, {
url,
document: index
}).then(html => {
// create the route directory
if (url !== '/index.html') {
fs.mkdirSync(folderPath);
}
fs.writeFile(folderPath + '/index.html', html, (err => {
if (err) {
throw err;
}
console.log(`success`);
}));
});
}
I've checked the main.bundle.js that's generated by running the build command, but its HTML is not extracted.

Gulp Error: spawn ENOENT

It is my goal to create a set of files and packages that I can easily share with my team. Currently I have Gulp set up on my local Mac (OS X 10.9.2), and it compiles correctly. However, when I share my setup with my two colleagues and they run the gulp command, there is an error:
events.js:72
throw er; // Unhandled 'error' event
^
Error: spawn ENOENT
at errnoException (child_process.js:998:11)
at Process.ChildProcess._handle.onexit (child_process.js:789:34)
The following is my package.json file:
{
"name": "MyApp",
"version": "0.0.1",
"description": "Name build process for responsive development frameworks",
"author": "My Name",
"devDependencies": {
"gulp-util": "^2.2.14",
"gulp": "^3.6.2",
"gulp-compass": "^1.1.8",
"gulp-concat": "^2.2.0",
"gulp-uglify": "^0.2.1",
"gulp-livereload": "^1.2.0",
"tiny-lr": "0.0.5",
"gulp-jshint": "^1.5.3",
"gulp-minify-html": "^0.1.1",
"gulp-minify-css": "^0.3.0",
"image-min": "^0.4.5",
"gulp-imagemin": "^0.5.0",
"gulp-newer": "^0.3.0"
}
}
Then I have my gulpfile.js setup as such:
// include gulp
var gulp = require('gulp'),
gutil = require('gulp-util');
// include plug-ins
var compass = require('gulp-compass'),
jshint = require('gulp-jshint'),
imagemin = require('gulp-imagemin'),
newer = require('gulp-newer'),
minifyHTML = require('gulp-minify-html'),
minifyCSS = require('gulp-minify-css'),
concat = require('gulp-concat'),
uglify = require('gulp-uglify'),
reload = require('gulp-livereload'),
lr = require('tiny-lr'),
server = lr();
var paths = {
jsSrc: [
'./src/js/vendor/jquery.js',
'./src/js/vendor/fastclick.js',
'./src/js/vendor/foundation.js'
],
ieJsSrc: [
'./src/js/vendor/foundation-ie/jquery.js',
'./src/js/vendor/foundation-ie/jquery.foundation.buttons.js',
'./src/js/vendor/foundation-ie/jquery.foundation.clearing.js',
'./src/js/vendor/foundation-ie/jquery.foundation.forms.js',
'./src/js/vendor/foundation-ie/jquery.foundation.reveal.js',
'./src/js/vendor/foundation-ie/jquery.foundation.tabs.js',
'./src/js/vendor/foundation-ie/jquery.foundation.tooltips.js'
],
jsDst: './src/js',
jsBuild: './build/common/js',
imgSrc: './src/img/*',
imgDst: './build/common/img',
imgBgSrc: './src/img/bg/*',
imgPhotoSrc: './src/img/photos/*',
imgBgDst: './build/common/img/bg',
imgPhotoDst: './build/common/img/photos',
htmlSrc: './src/**/*.html',
htmlDst: './build',
scssSrc: [
'./src/_scss/utsw.scss',
'./src/_scss/utsw-ie.scss',
'./src/_scss/themes/**/*.scss'
],
scssDst: './src/common/css/',
cssMedSrc: './src/css/themes/root/*.css',
cssMedDst: './src/root/css/',
cssMedBuild: './build/root/css/',
cssEaSrc: './src/css/themes/sites/early-access/*.css',
cssEaDst: './src/sites/early-access/css',
cssEaBuild: './build/sites/early-access/css',
cssProSrc: './src/css/themes/profile/*.css',
cssProDst: './src/profile/css/',
cssProBuild: './build/profile/css/',
cssNetSrc: './src/css/themes/intranet/*.css',
cssNetDst: './src/intranet/css/',
cssNetBuild: './build/intranet/css/'
};
// JS hint task
gulp.task('jshint', function() {
return gulp.src(paths.jsSrc)
.pipe(jshint())
.pipe(jshint.reporter('default'));
});
// minify new or changed HTML pages
gulp.task('html', function() {
return gulp.src(paths.htmlSrc)
.pipe(gulp.dest(paths.htmlDst));
});
// JS concat, strip debugging and minify
gulp.task('scripts', function() {
return gulp.src(paths.jsSrc)
.pipe(concat('vendor.min.js'))
.pipe(gulp.dest(paths.jsDst))
.pipe(uglify())
.pipe(gulp.dest(paths.jsBuild));
});
gulp.task('ieScripts', function() {
return gulp.src(paths.ieJsSrc)
.pipe(concat('ie-vendor.min.js'))
.pipe(gulp.dest(paths.jsDst))
.pipe(uglify())
.pipe(gulp.dest(paths.jsBuild));
});
// Minify any new images
gulp.task('images', function() {
return gulp.src(paths.imgSrc)
.pipe(newer(paths.imgDst))
.pipe(imagemin({optimizationLevel: 5}))
.pipe(gulp.dest(paths.imgDst));
});
// Minify any new images
gulp.task('bg', function() {
// Add the newer pipe to pass through newer images only
return gulp.src(paths.imgBgSrc)
.pipe(newer(paths.imgBgDst))
.pipe(imagemin({optimizationLevel: 5}))
.pipe(gulp.dest(paths.imgBgDst));
});
// Minify any new images
gulp.task('photos', function() {
// Add the newer pipe to pass through newer images only
return gulp.src(paths.imgPhotoSrc)
.pipe(newer(paths.imgPhotoDst))
.pipe(imagemin({optimizationLevel: 5}))
.pipe(gulp.dest(paths.imgPhotoDst));
});
gulp.task('compass', function() {
return gulp.src(paths.scssSrc)
.pipe(compass({
sass: './src/_scss',
css: './src/css',
image: './src/img'
}))
.on('error', function(err) {
// Would like to catch the error here
})
.pipe(gulp.dest('./src/temp'));
});
gulp.task('rootCSS', function() {
return gulp.src(paths.cssMedSrc)
.pipe(gulp.dest(paths.cssMedDst))
.pipe(minifyCSS())
.pipe(gulp.dest(paths.cssMedBuild));
});
gulp.task('eaCSS', function() {
return gulp.src(paths.cssEaSrc)
.pipe(gulp.dest(paths.cssEaDst))
.pipe(minifyCSS())
.pipe(gulp.dest(paths.cssEaBuild));
});
gulp.task('proCSS', function() {
return gulp.src(paths.cssProSrc)
.pipe(gulp.dest(paths.cssProDst))
.pipe(minifyCSS())
.pipe(gulp.dest(paths.cssProBuild));
});
gulp.task('netCSS', function() {
return gulp.src(paths.cssNetSrc)
.pipe(gulp.dest(paths.cssNetDst))
.pipe(minifyCSS())
.pipe(gulp.dest(paths.cssNetBuild));
});
// Rerun the task when a file changes
gulp.task('watch', function() {
var server = reload();
gulp.watch(paths.htmlSrc, ['html']);
gulp.watch(paths.jsSrc, ['scripts']);
gulp.watch(paths.ieJsSrc, ['ieScripts']);
gulp.watch(paths.imgSrc, ['images']);
gulp.watch(paths.imgBgSrc, ['bg']);
gulp.watch(paths.imgPhotoSrc, ['photos']);
gulp.watch(paths.scssSrc, ['compass']);
gulp.watch(paths.cssMedSrc, ['rootCSS']);
gulp.watch(paths.cssEaSrc, ['eaCSS']);
gulp.watch(paths.cssProSrc, ['proCSS']);
gulp.watch(paths.cssNetSrc, ['netCSS']);
gulp.watch(['./src/*.html', './src/js/', './src/css/*.css'], function(e) {
server.changed(e.path);
});
});
// The default task (called when you run `gulp` from cli)
gulp.task('default', [
'html',
'scripts',
'ieScripts',
'images',
'bg',
'photos',
'compass',
'rootCSS',
'eaCSS',
'proCSS',
'netCSS',
'watch'
]);
Any suggestions on how to get my colleagues set up with the same src and gulpfile.js would be awesome.
