Why is NodeJS only showing GZip compression after hard refresh?

I have a React app (no CRA), a Node.js server, and a custom webpack configuration. Lighthouse told me to enable text compression, so I looked into this. Since Node.js serves the React build for my view, the Network panel only shows that Gzip compression is enabled when I do Ctrl+Shift+R (hard refresh); if I just do Ctrl+R (soft refresh), it doesn't show the responses as gzipped.
On a soft refresh, no matter how many times I reload, the Network panel does not show the files as gzipped.
However, after a hard refresh they suddenly show as gzipped, and I don't know why.
Here is the Node.js code that acts as my server:
const express = require("express");
const compression = require("compression");
const bodyParser = require("body-parser");
const path = require("path");
const cors = require("cors");
const router = express.Router();
const dotenv = require("dotenv");
const config = dotenv.config({ path: path.join(__dirname, "../config/.env") });
const port = process.env.PORT || 5000;
var filePath = path.join(__dirname, "/../build/index.html");
const app = express();
var Blog = require("../routes/Blog");
var Schedule = require("../routes/Schedule");
var detailedReport = require("../routes/detailedReport");
var Login = require("../routes/Login");
var Admin = require("../routes/Admin");
var Forms = require("../routes/Forms");
var yelp = require("../routes/Yelp");
// compress the files in build directory
app.use(compression());
app.use(cors());
// Body Parser Middleware
app.use(bodyParser.json({ limit: "50mb" }));
app.use(bodyParser.urlencoded({ extended: false }));
app.use(function (req, res, next) {
  res.header("Access-Control-Allow-Origin", "*");
  res.header(
    "Access-Control-Allow-Headers",
    "Origin, X-Requested-With, Content-Type, Accept"
  );
  next();
});
// GET/POST Requests
app.use("/api", Blog);
app.use("/api", Schedule);
app.use("/api", detailedReport);
app.use("/api", Login);
app.use("/api", Admin);
app.use("/api", Forms);
app.use("/api", yelp);
// Serve static files on server
app.use(express.static(__dirname + "/../build"));
app.listen(port, () => {
  console.log(`Server successfully started`);
});
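One thing worth noting (my addition, not part of the original question): compression() only adds Content-Encoding: gzip when it actually sends a response body. If the browser revalidates a cached file on a soft refresh and the server answers 304 Not Modified, there is no body to compress, so no Content-Encoding appears in the Network panel, which would match the behaviour described above. The middleware can also be tuned; a minimal sketch, with option values that are purely illustrative:
// Sketch only: tune the compression middleware (values are examples, not from the project).
app.use(
  compression({
    threshold: 1024, // skip responses smaller than 1 KB (this is also the default)
    filter: (req, res) => {
      // let clients opt out with a custom header, otherwise use the default filter
      if (req.headers["x-no-compression"]) return false;
      return compression.filter(req, res);
    },
  })
);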
Webpack Configuration:
const webpack = require("webpack");
const path = require("path");
const CompressionPlugin = require("compression-webpack-plugin");
const { EnvironmentPlugin } = require("webpack");
module.exports = {
  entry: ["babel-polyfill", __dirname + "/src/index.js"],
  devtool: "source-map",
  output: {
    path: path.join(__dirname, "/build"),
    filename: "bundle.js",
  },
  module: {
    rules: [
      {
        test: /\.jsx?$/,
        exclude: /node_modules/,
        use: {
          loader: "babel-loader",
          query: {
            presets: ["react", "env", "stage-0"],
          },
        },
      },
      {
        test: /\.css$/,
        use: [
          { loader: "style-loader" },
          { loader: "css-loader" },
          { loader: "sass-loader" },
        ],
      },
      {
        test: /\.(png|jpg)$/,
        use: {
          loader: "file-loader",
        },
      },
    ],
  },
  plugins: [
    new CompressionPlugin({
      filename: "[path].gz[query]",
      algorithm: "gzip",
      test: /\.js$|\.css$|\.html$/,
      minRatio: 0.8,
      threshold: 10240,
    }),
    new webpack.ProvidePlugin({
      $: "jquery",
      jQuery: "jquery",
      "window.jQuery": "jquery",
    }),
    // Disable code splitting as it's creating many duplicate bundle profiles
    new webpack.optimize.LimitChunkCountPlugin({
      maxChunks: 1,
    }),
    new webpack.DefinePlugin({
      "process.env.NODE_ENV": JSON.stringify("production"),
    }),
  ],
};
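Also worth pointing out (again my addition, an assumption about intent rather than something stated in the question): CompressionPlugin only writes .gz copies of the bundles into the build folder; express.static on its own never serves them, so any gzip visible in the Network panel comes from the compression() middleware compressing on the fly. If the goal is to serve the pre-built .gz files instead, the request has to be rewritten and the headers set by hand (or a package such as express-static-gzip used). A rough sketch, to be registered before the express.static line:
// Sketch only: serve the .gz files emitted by CompressionPlugin instead of compressing on the fly.
// Register this BEFORE app.use(express.static(__dirname + "/../build")).
app.get(["*.js", "*.css"], (req, res, next) => {
  const isJs = req.path.endsWith(".js");
  req.url = req.url + ".gz";              // e.g. /bundle.js -> /bundle.js.gz
  res.set("Content-Encoding", "gzip");
  res.type(isJs ? "application/javascript" : "text/css");
  next();                                 // express.static then picks up the .gz file
});
Keep in mind that with threshold: 10240 only assets larger than 10 KB get a .gz twin, so a real implementation would need to check that the file exists before rewriting the URL.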

Related

ExpressJS: 404 error webpack-dev-middleware

Recently I ran into the following issue: I'm using the Express framework for some backend work and webpack-dev-middleware for compiling assets during development. The dev middleware seems to be doing its job (compiling CSS and JS, judging by the green messages in the terminal), but I constantly get a 404 when I try to access my JS or CSS files: /css/app.css - 404, /js/app.js - 404, etc. When I compile the assets with webpack directly, everything works fine and I get the compiled CSS and JS files with no problems. There is a GitHub repo here.
app.js
const createError = require('http-errors');
const express = require('express');
const path = require('path');
const cookieParser = require('cookie-parser');
const logger = require('morgan');
const argv = require('yargs').argv;
const app = express();
const devMiddleware = require('webpack-dev-middleware');
const webpack = require('webpack');
const webpackConfig = require('../../webpack.config');
const compiler = webpack(webpackConfig(null, { mode: 'development' }));
// view engine setup
app.set('views', path.join(__dirname, '../../views'));
app.set('view engine', 'pug');
app.use(logger('dev'));
app.use(express.json());
app.use(express.urlencoded({ extended: false }));
app.use(cookieParser());
app.get('/', (req, res, next) => {
res.render('index', { title: "home" });
})
app.use(express.static(path.join(__dirname, '../../public')));
// catch 404 and forward to error handler
app.use(function (req, res, next) {
next(createError(404));
});
if (argv.mode !== 'production') {
app.use(devMiddleware(compiler, {
noInfo: true,
publicPath: webpackConfig(null, { mode: 'development' }).output.publicPath
}));
app.use(require("webpack-hot-middleware")(compiler));
}
// error handler
app.use(function (err, req, res, next) {
// set locals, only providing error in development
res.locals.message = err.message;
res.locals.error = req.app.get('env') === 'development' ? err : {};
// render the error page
res.status(err.status || 500);
res.render('error');
});
module.exports = app;
webpack.config.js
const Path = require('path');
const MiniCssExtractPlugin = require('mini-css-extract-plugin');
const TerserJSPlugin = require('terser-webpack-plugin');
const OptimizeCSSAssetsPlugin = require('optimize-css-assets-webpack-plugin');
const CopyPlugin = require('copy-webpack-plugin');
const webpack = require('webpack');
module.exports = (env, argv) => {
return {
entry: {
app: [
"./src/client/app.js",
"./src/scss/style.scss",
'webpack-hot-middleware/client?path=http://localhost/__webpack_hmr&timeout=20000'
]
},
output: {
filename: 'js/[name].js',
path: Path.resolve(__dirname, 'public'),
publicPath: '/'
},
mode: argv.mode || 'development',
devtool: argv.mode !== 'production' ? 'source-map' : false,
optimization: {
minimizer: [
new TerserJSPlugin({ sourceMap: true }),
new OptimizeCSSAssetsPlugin({
cssProcessorOptions: {
map: argv.mode !== 'production' ? {
inline: false // set to false if you want CSS source maps
} : null
}
})],
},
plugins: [
new MiniCssExtractPlugin({
// Options similar to the same options in webpackOptions.output
// both options are optional
filename: 'css/[name].css',
chunkFilename: '[id].css',
}),
new CopyPlugin([
{ from: 'images/*' }
]),
// OccurrenceOrderPlugin is needed for webpack 1.x only
new webpack.optimize.OccurrenceOrderPlugin(),
new webpack.HotModuleReplacementPlugin(),
// Use NoErrorsPlugin for webpack 1.x
new webpack.NoEmitOnErrorsPlugin()
],
module: {
rules: [
{
test: /\.js$/,
use: [
{
loader: "babel-loader",
}
]
},
{
test: /\.s?css$/,
exclude: [Path.resolve("/node_modules/")],
use: [
{
loader: MiniCssExtractPlugin.loader,
options: {
// you can specify a publicPath here
// by default it uses publicPath in webpackOptions.output
publicPath: '../',
hmr: process.env.NODE_ENV === 'development',
},
}, {
loader: "css-loader",
options: {
sourceMap: true,
url: false,
}
}, {
loader: 'postcss-loader',
options: {
sourceMap: true
}
}, {
loader: "sass-loader",
options: {
sourceMap: true,
includePaths: [Path.resolve('src/scss')],
}
}
]
},
]
}
}
};
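A likely cause here (my guess; the excerpt above does not include an accepted answer): in app.js the webpack-dev-middleware is only registered after express.static and after the catch-all that forwards every unmatched request to the 404 handler, so requests for /js/app.js and /css/app.css never reach it and the in-memory bundles are never served. Moving the dev middleware above those handlers should fix it; a sketch of the reordering:
// Sketch only: register webpack-dev-middleware (and hot middleware) before the
// static handler and the 404 catch-all so the in-memory bundles can be served.
if (argv.mode !== 'production') {
  app.use(devMiddleware(compiler, {
    noInfo: true,
    publicPath: webpackConfig(null, { mode: 'development' }).output.publicPath
  }));
  app.use(require('webpack-hot-middleware')(compiler));
}
app.use(express.static(path.join(__dirname, '../../public')));
// catch 404 and forward to error handler
app.use(function (req, res, next) {
  next(createError(404));
});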

Two Webpack Configs (Client, Server) Express App not hosting HTML files

I am building an application with a Node backend that I am trying to bundle with webpack.
At first I had a single webpack configuration with target: node. I was unable to compile WebSockets into the frontend bundle unless I changed it to target: web, but then my backend code changes weren't being compiled and I had to run tsc && webpack.
I have now moved to two configs to compile them separately. My current config is:
const path = require("path");
const CopyWebpackPlugin = require("copy-webpack-plugin");
const HtmlWebpackPlugin = require("html-webpack-plugin");
var fs = require("fs");
var nodeModules = {};
fs.readdirSync("node_modules")
.filter(function(x) {
return [".bin"].indexOf(x) === -1;
})
.forEach(function(mod) {
nodeModules[mod] = "commonjs " + mod;
});
const common = {
module: {
rules: [
{
test: /\.tsx?$/,
use: "ts-loader",
exclude: /node_modules/
},
{
test: /\.html$/,
use: [{ loader: "html-loader" }]
}
]
},
resolve: {
extensions: [".tsx", ".ts", ".js"]
}
};
const frontend = {
entry: "./src/index.ts",
output: {
filename: "bundle.js",
path: path.resolve(__dirname, "build"),
publicPath: "/"
},
target: "web",
plugins: [
new CopyWebpackPlugin([
{
from: path.resolve(
"node_modules/#webcomponents/webcomponentsjs/webcomponents-bundle.js"
),
to: path.resolve(__dirname, "build/vendor")
},
{
from: path.resolve(
"node_modules/#webcomponents/webcomponentsjs/custom-elements-es5-adapter.js"
),
to: path.resolve(__dirname, "build/vendor")
}
]),
new HtmlWebpackPlugin({
title: "Flop The World Poker",
template: "build/index.template.html"
})
]
};
const backend = {
entry: "./src/server.ts",
output: {
filename: "server.js",
path: path.resolve(__dirname, "build"),
publicPath: "/"
},
target: "node",
externals: nodeModules
};
module.exports = [
Object.assign({}, common, frontend),
Object.assign({}, common, backend)
];
Before I switched to two configs, I was able to host the index.html in my output folder using:
import bodyParser from "body-parser";
import express from "express";
import { createServer } from "http";
import { listen } from "socket.io";
import { DeckController} from "./controllers";
const app: express.Application = express();
const port: number = ((process.env.PORT as any) as number) || 3000;
const server = createServer(app);
const io = listen(server);
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: false }));
app.use(express.static("static"));
app.use("/deck", DeckController);
app.use(express.static(__dirname, { extensions: ["html"] }));
server.listen(port, () => {
console.log(`Listening at http://localhost:${port}/`);
});
io.on("connection", socket => {
console.log("Client connected..");
socket.on("join", data => {
console.log(data);
});
});
I am now receiving Cannot GET /. I can set up a route like
router.get("/", (req: Request, res: Response) => {
res.send("Hello World");
});
and receive Hello World in the browser.
Can anyone help me figure out what changed so that I can no longer serve my HTML file?
I fixed this with help from Robert in the comments. Here are my new files:
const path = require("path");
const CopyWebpackPlugin = require("copy-webpack-plugin");
const HtmlWebpackPlugin = require("html-webpack-plugin");
var fs = require("fs");
var nodeModules = {};
fs.readdirSync("node_modules")
.filter(function(x) {
return [".bin"].indexOf(x) === -1;
})
.forEach(function(mod) {
nodeModules[mod] = "commonjs " + mod;
});
const common = {
module: {
rules: [
{
test: /\.tsx?$/,
use: "ts-loader",
exclude: /node_modules/
}
]
},
resolve: {
extensions: [".tsx", ".ts", ".js"]
}
};
const frontend = {
entry: "./src/index.ts",
output: {
filename: "bundle.js",
path: path.resolve(__dirname, "build"),
publicPath: "/"
},
target: "web",
plugins: [
new CopyWebpackPlugin([
{
from: path.resolve(
"node_modules/#webcomponents/webcomponentsjs/webcomponents-bundle.js"
),
to: path.resolve(__dirname, "build/vendor")
},
{
from: path.resolve(
"node_modules/#webcomponents/webcomponentsjs/custom-elements-es5-adapter.js"
),
to: path.resolve(__dirname, "build/vendor")
}
]),
new HtmlWebpackPlugin({
title: "Flop The World Poker",
template: "build/index.template.html"
})
]
};
const backend = {
entry: "./src/server.ts",
output: {
filename: "server.js",
path: path.resolve(__dirname, "build"),
publicPath: "/"
},
target: "node",
externals: nodeModules
};
module.exports = [
Object.assign({}, common, frontend),
Object.assign({}, common, backend)
];
import bodyParser from "body-parser";
import express from "express";
import { createServer } from "http";
import { listen } from "socket.io";
import { DeckController } from "./controllers";
const app: express.Application = express();
const port: number = ((process.env.PORT as any) as number) || 3000;
const server = createServer(app);
const io = listen(server);
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: false }));
app.use(express.static("./build"));
app.use("/deck", DeckController);
app.use(express.static(__dirname, { extensions: ["html"] }));
server.listen(port, () => {
// tslint:disable-next-line:no-console
console.log(`Listening at http://localhost:${port}/`);
});
io.on("connection", socket => {
console.log("Client connected..");
socket.send("Testing Message");
socket.on("join", data => {
console.log(data);
});
});
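One note on the fix (my aside, not from the original poster or Robert): the change that matters is that express.static now points at the webpack output directory ("./build", resolved against the working directory the server is started from), which is where HtmlWebpackPlugin emits index.html and where bundle.js lives. Inside a webpack-bundled node target, __dirname is typically mocked unless node: { __dirname: false } is configured, so a cwd-relative path is the safer choice here. If deep links should also work, an explicit SPA fallback can be added after the routes; a sketch under those assumptions:
// Sketch only: assumes the server is started from the project root, so "./build"
// is the webpack output folder containing the generated index.html.
// (requires: import path from "path";)
app.use(express.static("./build"));
app.get("*", (req, res) => {
  res.sendFile(path.resolve("./build", "index.html"));  // SPA fallback for client-side routes
});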

Webpack - how to test production on my machine

I have two webpack config files, one for development and one for production.
I would like to test the production config file on my local machine - how do I do that?
In other words I would like to run https://localhost:3000 and see my app while it is in production mode.
The script that runs the production config is npm run build, which creates files in the dist directory - how do I serve those files?
webpack.config.prod.js
// For info about this file refer to webpack and webpack-hot-middleware documentation
// For info on how we're generating bundles with hashed filenames for cache busting: https://medium.com/@okonetchnikov/long-term-caching-of-static-assets-with-webpack-1ecb139adb95#.w99i89nsz
const webpack = require('webpack');
const ExtractTextPlugin = require('extract-text-webpack-plugin');
const WebpackChunkHash = require('webpack-chunk-hash');
const HtmlWebpackPlugin = require('html-webpack-plugin');
const ScriptExtHtmlWebpackPlugin = require('script-ext-html-webpack-plugin');
const UglifyJsPlugin = require('uglifyjs-webpack-plugin');
const path = require('path');
process.env.NODE_ENV = 'production';
const GLOBALS = {
'process.env.NODE_ENV': JSON.stringify('production'),
'process.env.BABEL_ENV': JSON.stringify('production'),
'process.env.PORT': 3000,
__DEV__: false
};
module.exports = {
resolve: {
extensions: ['*', '.js', '.jsx', '.json']
},
devtool: "eval", // more info:https://webpack.github.io/docs/build-performance.html#sourcemaps and https://webpack.github.io/docs/configuration.html#devtool
entry: path.resolve(__dirname, 'public/index'),
target: 'web', // necessary per https://webpack.github.io/docs/testing.html#compile-and-test
output: {
path: path.resolve(__dirname, 'dist'),
publicPath: './',
filename: '[name].[chunkhash].js'
},
stats: {
children: false,
},
plugins: [
// Hash the files using MD5 so that their names change when the content changes.
new WebpackChunkHash({algorithm: 'md5'}), // 'md5' is default value
// Tells React to build in prod mode. https://facebook.github.io/react/downloads.html
new webpack.DefinePlugin(GLOBALS),
// Generate HTML file that contains references to generated bundles. See here for how this works: https://github.com/ampedandwired/html-webpack-plugin#basic-usage
new HtmlWebpackPlugin({
template: 'public/index.ejs',
favicon: 'public/styles/images/icon.png',
minify: {
removeComments: true,
collapseWhitespace: true,
removeRedundantAttributes: true,
useShortDoctype: true,
removeEmptyAttributes: true,
removeStyleLinkTypeAttributes: true,
keepClosingSlash: true,
minifyJS: true,
minifyCSS: true,
minifyURLs: true
},
inject: true,
// Note that you can add custom options here if you need to handle other custom logic in index.html
// To track JavaScript errors via TrackJS, sign up for a free trial at TrackJS.com and enter your token below.
trackJSToken: ''
}),
// Generate an external css file with a hash in the filename
new ExtractTextPlugin('[name].[contenthash].css'),
// https://github.com/numical/script-ext-html-webpack-plugin
new ScriptExtHtmlWebpackPlugin({
defaultAttribute: 'async'
}),
// Minify JS
new UglifyJsPlugin(),
new webpack.optimize.CommonsChunkPlugin({
name: "vendor",
minChunks: isVendor
}),
],
module: {
rules: [
{
test: /\.jsx?$/,
exclude: /node_modules/,
use: ['babel-loader']
},
{
test: /\.eot(\?v=\d+.\d+.\d+)?$/,
use: ['url-loader']
},
{
test: /\.woff(2)?(\?v=[0-9]\.[0-9]\.[0-9])?$/,
use: [
{
loader: 'url-loader',
options: {
limit: 10000,
mimetype: 'application/font-woff'
}
}
]
},
{
test: /\.[ot]tf(\?v=\d+.\d+.\d+)?$/,
use: [
{
loader: 'url-loader',
options: {
limit: 10000,
mimetype: 'application/octet-stream'
}
}
]
},
{
test: /\.svg(\?v=\d+\.\d+\.\d+)?$/,
use: [
{
loader: 'url-loader',
options: {
limit: 10000,
mimetype: 'image/svg+xml'
}
}
]
},
{
test: /\.(jpe?g|png|gif|ico)$/i,
use: ['file-loader']
},
{
test: /(\.css|\.scss|\.sass)$/,
exclude: /node_modules/,
use: ExtractTextPlugin.extract({
use: [
{
loader: 'css-loader',
options: {
minimize: true,
modules: true,
importLoaders: 1,
localIdentName: '[name]-[local]-[hash:base64:2]',
sourceMap: true
}
}, {
loader: 'postcss-loader',
options: {
plugins: () => [
require('autoprefixer')
],
sourceMap: true
}
}, {
loader: 'sass-loader',
options: {
includePaths: [
path.resolve(__dirname, 'src/scss'),
path.resolve(__dirname, "node_modules/foundation-sites/scss")
],
sourceMap: true
}
}
]
})
},
]
}
};
function isVendor({resource}) {
return (
resource && resource.indexOf("node_modules") >= 0 && resource.match(/\.js$/)
);
}
server.js
// server.js
// set up ============================================
const express = require('express');
const path = require('path');
const cookieParser = require('cookie-parser');
const bodyParser = require('body-parser');
const expressValidator = require('express-validator');
const mongoose = require('mongoose');
const config = require('./config');
const enforce = require("express-sslify");
const compression = require('compression');
// Connect to mongoose
mongoose.Promise = global.Promise;
mongoose.connect(config.dbUrl, { useMongoClient: true});
// Init App
const app = express();
// Compress all responses
app.use(compression());
// redirect http requests to https
if (process.env.NODE_ENV === 'production')
app.use(enforce.HTTPS({ trustProtoHeader: true }));
// Support webpack-dev-server
app.use(function(req, res, next) {
res.header("Access-Control-Allow-Origin", "http://localhost:5000");
res.header("Access-Control-Allow-Methods", "GET, PUT, POST, DELETE");
res.header("Access-Control-Allow-Headers", "Content-Type, Authorization");
next();
});
// Body Parser Middleware
// parse application/json
app.use(bodyParser.json());
app.use(expressValidator());
// parse application/x-www-form-urlencoded
// for easier testing with Postman or plain HTML forms
app.use(bodyParser.urlencoded({
extended:true
}));
// Cookie Parser Middleware
app.use(cookieParser());
// Set Static Folder
app.use(express.static('public/*.html'));
app.use(express.static('dist'));
// Set Controllers
app.use('/', require('./controllers'));
//------------------------------------------------------------------//
// Set Port
app.set('port', process.env.PORT || 3000);
app.get('*', (req, res) => {
res.sendFile(`${__dirname}/dist/index.html`);
});
// Launch
app.listen(app.get('port'), () => {
console.log('Meeba started listening on port ' + app.get('port'));
});
module.exports = app;
Have you tried adding node path/to/server.js to your build script, i.e. webpack --mode production ..etc && node server.js, so the server script runs with the Node CLI? In your server.js you already serve the dist folder as static content, after all.
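Expanding on that (a sketch of my own, with an assumed file name and port): after npm run build, a few lines of Express are enough to preview the dist folder locally without the rest of the app's server dependencies:
// serve-dist.js - minimal sketch for previewing the production build locally.
// Run "npm run build" first, then "node serve-dist.js" and open http://localhost:3000.
const express = require("express");
const path = require("path");

const app = express();
app.use(express.static(path.join(__dirname, "dist")));        // hashed bundles, css, images
app.get("*", (req, res) => {
  res.sendFile(path.join(__dirname, "dist", "index.html"));   // SPA fallback
});
app.listen(3000, () => console.log("Production preview on http://localhost:3000"));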
You could use any webserver (e.g. NGINX) to serve your dist folder.
Have a look at Serving Static Content.
You can get NGINX running with Docker. It can get even easier with this predefined Docker image to serve SPAs in NGINX.

Use React-Router browserHistory, Webpack 2 historyApiFallback and Node

I'm trying to run my React app locally using React, Node and Webpack 2. Whenever I hit a route that isn't / I get a 404. My goal is to be able to run my node server, have webpack-dev-server run, use browserHistory, and have my webpack historyApiFallback work.
What currently does work:
If I just run webpack-dev-server and no node server then browserHistory works fine, no 404s.
If I run node with hashHistory it works fine, no 404s.
So that rules out a problem with my routes themselves. Here is some code:
server.js
const express = require('express');
const expressGraphQL = require('express-graphql');
const schema = require('./schema');
const app = express();
app.use('/graphql', expressGraphQL({
schema,
graphiql: true
}));
const webpackMiddleware = require('webpack-dev-middleware');
const webpack = require('webpack');
const webpackConfig = require('../webpack.config.js');
app.use(webpackMiddleware(webpack(webpackConfig)));
app.listen(process.env.PORT || 5000, () => console.log('Listening'));
webpack.config.js
const webpack = require('webpack');
const path = require('path');
const HtmlWebpackPlugin = require('html-webpack-plugin');
const VENDOR_LIBS = [
'axios', 'react', 'react-dom', 'react-router', 'react-apollo', 'prop-types'
];
module.exports = {
entry: {
bundle: './client/src/index.js',
vendor: VENDOR_LIBS
},
output: {
path: path.join(__dirname, 'dist'),
publicPath: '/',
filename: '[name].[chunkhash].js'
},
module: {
rules: [
{
use: 'babel-loader',
test: /\.js$/,
exclude: /node_modules/
},
{
test: /\.scss$/,
use: [{
loader: "style-loader"
}, {
loader: "css-loader"
}, {
loader: "sass-loader"
}]
},
{
test: /\.(jpe?g|png|gif|svg|)$/,
use: [
{
loader: 'url-loader',
options: {limit: 40000}
},
'image-webpack-loader'
]
}
]
},
plugins: [
new webpack.optimize.CommonsChunkPlugin({
names: ['vendor', 'manifest']
}),
new webpack.DefinePlugin({
'process.env.NODE_ENV': JSON.stringify(process.env.NODE_ENV)
}),
new HtmlWebpackPlugin({
template: './client/src/index.html'
})
],
devServer: {
historyApiFallback: true
}
};
routes.js
import React from 'react';
import { Router, Route, IndexRoute, browserHistory } from 'react-router';
import App from './components/App';
import Portal from './components/portal/Portal';
const componentRoutes = {
component: App,
path: '/',
indexRoute: { component: Portal },
childRoutes: [
{
path: 'home',
getComponent(location, cb) {
System.import('./components/homepage/Home')
.then(module => cb(null, module.default));
}
}
]
};
const Routes = () => {
return <Router history={ browserHistory } routes={ componentRoutes } />
};
export default Routes;
Again, the goal is to be able to locally start up my node server, use browserHistory and not get 404s. I don't want to use hashHistory and I need to use my node server so I can use graphql. I also don't want to revert back to webpack v1. Though here is a link to where people got it working in v1:
historyApiFallback doesn't work in Webpack dev server
The historyApiFallback option is specifically for webpack-dev-server. If you're running your own server, even with webpack-dev-middleware, you need to configure it to send the index.html when a 404 occurs. Because you're using html-webpack-plugin the index.html you want to send does not exist on your file system but only in memory. To make it work you can access the output of the webpack compiler as shown in the comment of html-webpack-plugin #145.
server.js
const path = require('path');
const express = require('express');
const expressGraphQL = require('express-graphql');
const schema = require('./schema');
const app = express();
app.use('/graphql', expressGraphQL({
schema,
graphiql: true
}));
const webpackMiddleware = require('webpack-dev-middleware');
const webpack = require('webpack');
const webpackConfig = require('../webpack.config.js');
const compiler = webpack(webpackConfig);
app.use(webpackMiddleware(compiler));
// Fallback when no previous route was matched
app.use('*', (req, res, next) => {
const filename = path.resolve(compiler.outputPath, 'index.html');
compiler.outputFileSystem.readFile(filename, (err, result) => {
if (err) {
return next(err);
}
res.set('content-type','text/html');
res.send(result);
res.end();
});
});
app.listen(process.env.PORT || 5000, () => console.log('Listening'));
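For completeness (my addition, not part of the answer above): the in-memory readFile is only needed while webpack-dev-middleware is in the loop. In a production build, where the bundles and index.html are written to disk, the equivalent fallback is just express.static plus sendFile; a sketch with an assumed location for the dist folder relative to server.js:
// Sketch only: production-style fallback once index.html exists on disk.
const DIST = path.resolve(__dirname, '../dist');   // assumed path to the webpack output
app.use(express.static(DIST));
app.get('*', (req, res) => {
  res.sendFile(path.join(DIST, 'index.html'));
});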

Extract .scss to css using webpack

I am trying to compile all my Sass files into a single style.css file as explained here. I installed the plugin with npm install --save-dev extract-text-webpack-plugin, and my webpack.config.dev.js is below.
const path = require('path');
const webpack = require('webpack');
const WebpackNotifierPlugin = require('webpack-notifier');
const ExtractTextPlugin = require('extract-text-webpack-plugin');
module.exports = {
devtool: 'eval',
entry: [
'./src',
],
output: {
path: path.join(__dirname, 'dist'),
filename: 'app.js',
publicPath: '/',
},
plugins: [
new webpack.HotModuleReplacementPlugin(),
new webpack.NoErrorsPlugin(),
new WebpackNotifierPlugin({ title: 'My Project Build' }),
new ExtractTextPlugin('public/style.css', {
allChunks: true,
}),
],
module: {
loaders: [{
test: /\.js$/,
loader: 'babel-loader',
include: path.join(__dirname, 'src'),
}, {
test: /\.scss$/,
loader: ExtractTextPlugin.extract('css!sass'),
include: path.join(__dirname, 'public/sass'),
}, {
test: /\.js$/,
loader: 'eslint-loader',
include: path.join(__dirname, 'src'),
}],
},
};
In my public folder, I added another folder, sass, containing a single body.scss file for testing, shown below.
.first-component {
.text { font-size: 1.4rem; }
.button { font-size: 1.7rem; }
}
When I run node server.js, I expected my body.scss to be written into my empty style.css. My server.js is below.
const path = require('path');
const express = require('express');
const webpack = require('webpack');
const config = require('./webpack.config.dev');
const app = express();
const compiler = webpack(config);
app.use(require('webpack-dev-middleware')(compiler, {
noInfo: true,
publicPath: config.output.publicPath,
}));
app.use(require('webpack-hot-middleware')(compiler));
app.use('/', express.static('public'));
app.get('*', (req, res) =>
res.sendFile(path.join(__dirname, 'public/index.html'))
);
app.listen(3030, 'localhost', err => {
if (err) {
console.log(err);
return;
}
console.log('Listening at http://localhost:3030');
});
Webpack is executed when I run node server.js. However, my style.css file still remains empty. I have tried everything to no avail. What am I missing, and how do I address this?
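A likely explanation (my guess; there is no accepted answer in this excerpt): webpack-dev-middleware serves the compiled output from memory at the configured publicPath instead of writing it to disk, so the extracted stylesheet is reachable in the browser at /public/style.css (output publicPath '/' plus the ExtractTextPlugin filename) while the style.css file on disk stays empty. Newer versions of webpack-dev-middleware can be told to also write the output to disk; a sketch, assuming a version that supports the writeToDisk option:
// Sketch only: requires a webpack-dev-middleware version that supports writeToDisk.
app.use(require('webpack-dev-middleware')(compiler, {
  publicPath: config.output.publicPath,
  writeToDisk: true, // also emit dist/public/style.css (and app.js) to disk
}));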

Resources