Getting a "window is not defined" error when using Next.js and the next-workers package.
require('dotenv').config();
const path = require('path');
const withPlugins = require('next-compose-plugins');
const withCSS = require('@zeit/next-css');
const withWorkers = require('@zeit/next-workers');
const Dotenv = require('dotenv-webpack');

const webpackConfig = (config) => {
  config.plugins = config.plugins || [];
  config.target = 'universal';
  config.output = {
    ...config.output,
    filename: 'bundle.js',
    path: path.join(__dirname, 'dist'),
    globalObject: '(this || global)',
    publicPath: 'http://localhost:3000'
  };
  config.plugins = [
    ...config.plugins,
    // Read the .env file
    new Dotenv({
      path: path.join(__dirname, '.env'),
      systemvars: true
    })
  ];
  return config;
};

module.exports = withPlugins([[withCSS], [withWorkers]], webpackConfig);
The recommended workaround appears to be setting globalObject to this or global, but that isn't working. Are there any other workarounds for this?
I was just experiencing this same error and it was solved by following the advice here:
yarn add @zeit/next-workers@canary
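If upgrading alone doesn't resolve it, a common general workaround in Next.js (not specific to next-workers; the worker file name here is illustrative) is to only touch the worker on the client, where window actually exists:

// Defer anything that touches `window` until we're in the browser;
// during server-side rendering `window` is undefined, so this block is skipped.
if (typeof window !== 'undefined') {
  const MyWorker = require('./my.worker.js') // worker-loader style module (illustrative)
  const worker = new MyWorker()
  worker.postMessage('ping')
}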
I added fs to my Next.js project and received the following error:
Module not found: Can't resolve 'fs' in '/Users/neven/Development/next-js/node_modules/dotenv/lib'
I found that to resolve this issue, I should add config.node = { fs: 'empty' } to my next.config.js file.
The problem is that when I add that config param, the dotenv plugin stops working; that is, env variables are not loaded on the client side.
This is my next.config.js file, which works without any issues.
const withCSS = require('@zeit/next-css')
const dotenv = require('dotenv')
const path = require('path')
const Dotenv = require('dotenv-webpack')

dotenv.config()

module.exports = withCSS({
  webpack: config => {
    config.plugins = config.plugins || []
    config.plugins = [
      ...config.plugins,
      // Read the .env file
      new Dotenv({
        path: path.join(__dirname, '.env'),
        systemvars: true,
      }),
    ]
    return config
  },
})
And then when I add fs: 'empty', it looks like this:
const withCSS = require('@zeit/next-css')
const dotenv = require('dotenv')
const path = require('path')
const Dotenv = require('dotenv-webpack')

dotenv.config()

module.exports = withCSS({
  webpack: config => {
    config.plugins = config.plugins || []
    config.node = {
      fs: 'empty'
    }
    config.plugins = [
      ...config.plugins,
      // Read the .env file
      new Dotenv({
        path: path.join(__dirname, '.env'),
        systemvars: true,
      }),
    ]
    return config
  },
})
Do you have any suggestions on how I could work this thing out?
Let me know in case additional details are needed.
I found out what the issue was: the dotenv plugin is working correctly, but I was trying to read the variables on the client side, and that is not possible this way.
The solution for using env variables on the client side is to add env: { EXAMPLE: 'helloWorld' } to the next.config.js file.
const withCSS = require('@zeit/next-css')
const dotenv = require('dotenv')
const path = require('path')
const Dotenv = require('dotenv-webpack')

dotenv.config()

module.exports = withCSS({
  env: { EXAMPLE: 'helloWorld' },
  webpack: config => {
    config.plugins = config.plugins || []
    config.node = {
      fs: 'empty'
    }
    config.plugins = [
      ...config.plugins,
      // Read the .env file
      new Dotenv({
        path: path.join(__dirname, '.env'),
        systemvars: true,
      }),
    ]
    return config
  },
})
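With env: { EXAMPLE: 'helloWorld' } set in next.config.js, client-side code can then read the inlined value. A minimal illustration (the page itself is hypothetical, not part of the original question):

// pages/index.js (illustrative) — Next.js replaces process.env.EXAMPLE
// at build time with the value from next.config.js
export default function Home() {
  return <p>EXAMPLE is: {process.env.EXAMPLE}</p>;
}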
The issue here is that your client-side can't access the environment variables.
Starting with Next.js 9.4, you can use .env* files to add your environment variables.
For your client side to get access to the environment variables, you just need to prefix them with NEXT_PUBLIC_:
NEXT_PUBLIC_YOUR_KEY="keykeykey"
These can then be accessed with:
process.env.NEXT_PUBLIC_YOUR_KEY
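For example, any page or component can render the prefixed value directly (the component name is illustrative):

// NEXT_PUBLIC_* values are inlined at build time, so they are
// available in browser code without any extra configuration.
export default function ApiKeyBadge() {
  return <code>{process.env.NEXT_PUBLIC_YOUR_KEY}</code>;
}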
I have a really small Express application, basically a poor man's serverless function provider. It's nothing I use in production, and it works fine in unpackaged mode. But after I webpack this application, the dynamic require always fails with an Error: Cannot find module exception, even though the module is there and the path is correct.
devserver.js:
const express = require('express')
const fs = require('fs')

const resourceMapper = fs.existsSync(__dirname + '/../webpack.config.js') ? require('../webpack.config') : null
const app = express()

// enable express json
app.use(express.json())

// add dynamic lambda functions
app.post('/api/:lambda', function(request, response) {
  const resource = resourceMapper
    ? `${__dirname}/.${resourceMapper[1].entry[request.params.lambda.replace(/^common-/, "").replace(/\.js$/, "")]}`
    : `${__dirname}/${request.params.lambda}`
  const lambda = require(resource) // <--------- problematic require
  const result = lambda.main(request.body)
  response.status(200).json(result)
});

app.listen(3000)
webpack.config.js:
module.exports = [
  {
    name: "development-server",
    target: 'node',
    externals: [(require('webpack-node-externals'))()],
    node: {
      __dirname: false,
      __filename: false,
    },
    entry: {
      devserver: "./devserver/server.js",
    },
    output: {
      filename: "./server.js",
      path: __dirname + '/build/node/'
    }
  },
  {
    name: "lambdas",
    target: 'node',
    externals: [(require('webpack-node-externals'))()],
    entry: {
      returns: "./lambdas/returns/src/main/index.js",
      echo: "./lambdas/echo/src/main/echo.js"
    },
    output: {
      library: "[name]",
      libraryTarget: "commonjs2",
      filename: "./common-[name].js",
      path: __dirname + '/build/node/'
    }
  }
]
I have tried lots of things and various modules and plugins. In the end, the only way I got it working was a very nasty eval trick.
const require2 = eval('require')
// [snip]
const lambda = resourceMapper
  ? require(`${__dirname}/.${resourceMapper[1].entry[request.params.lambda.replace(/^common-/, "").replace(/\.js$/, "")]}`)
  : require2(`./${request.params.lambda}`)
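As an alternative to the eval trick, webpack provides __non_webpack_require__ for node-targeted bundles; it is emitted as a plain require() in the output, so webpack does not try to resolve the argument at build time. A sketch of the same lookup using it (untested against this exact setup):

const lambdaName = request.params.lambda.replace(/^common-/, "").replace(/\.js$/, "")
// __non_webpack_require__ compiles to a bare require() in the bundle,
// so the path is resolved at runtime instead of at bundling time.
const lambda = resourceMapper
  ? __non_webpack_require__(`${__dirname}/.${resourceMapper[1].entry[lambdaName]}`)
  : __non_webpack_require__(`./${request.params.lambda}`)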
For example, I have a compiled binary cudaDeviceQuery which returns a list of devices as JSON. Here's a piece of code:
export default function cudaDeviceQuery(): Promise<CollectorResponse> {
  const throwError = () => {
    throw new Error("Unfortunately your platform isn't yet supported");
  };
  const file = __DARWIN__
    ? path.join(__dirname, 'darwin', 'cudaDeviceQuery')
    : __WIN32__
      ? path.join(__dirname, 'win', 'cudaDeviceQuery.exe')
      : throwError();
  const descriptor = spawn(file);
  return new Promise((resolve, reject) => {
    let outerData = '';
    descriptor.stdout.on('data', data => {
      outerData += data;
    });
    descriptor.on('close', () => {
      try {
        resolve(JSON.parse(outerData));
      } catch (e) {
        reject(e);
      }
    });
  });
}
But when I use this function from the renderer process, __dirname is /, so I get a spawn /darwin/cudaDeviceQuery ENOENT error. What's the proper way to spawn it in the dev environment and to package it for production?
A webpack config:
webpack.config.base.js:
/**
 * Base webpack config used across other specific configs
 */
const webpack = require('webpack');
const path = require('path');
const getReplacements = require('./app/app-info').getReplacements;

const { dependencies: externals } = require('./app/renderer/package.json');

module.exports = {
  module: {
    noParse: [path.join(__dirname, 'node_modules/ws')],
    rules: [
      {
        test: /\.tsx?$/,
        use: [
          {
            loader: 'babel-loader',
          },
          {
            loader: 'ts-loader',
          },
        ],
        exclude: /node_modules/,
      },
    ],
  },
  output: {
    path: path.join(__dirname, 'app', 'renderer'),
    filename: 'bundle.js',
    libraryTarget: 'commonjs2',
  },
  // https://webpack.github.io/docs/configuration.html#resolve
  resolve: {
    extensions: ['.js', '.ts', '.tsx', '.json'],
    modules: [path.join(__dirname, 'app', 'renderer'), 'node_modules'],
  },
  plugins: [new webpack.DefinePlugin(getReplacements())],
  externals: [...Object.keys(externals || {}), 'ws'],
};
webpack.config.development.js:
/**
 * Build config for development process that uses Hot-Module-Replacement
 * https://webpack.github.io/docs/hot-module-replacement-with-webpack.html
 */
const webpack = require('webpack');
const merge = require('webpack-merge');
const baseConfig = require('./webpack.config.base');
const getReplacements = require('./app/app-info').getReplacements;

const port = process.env.PORT || 3000;

module.exports = merge(baseConfig, {
  devtool: 'inline-source-map',
  entry: [
    'react-hot-loader/patch',
    `webpack-hot-middleware/client?path=http://localhost:${port}/__webpack_hmr&reload=true`,
    './app/renderer/index',
  ],
  output: {
    publicPath: `http://localhost:${port}/dist/`,
  },
  module: {
    rules: [
      // Css, SCSS, woff loaders are here
    ],
  },
  plugins: [
    // https://webpack.github.io/docs/hot-module-replacement-with-webpack.html
    new webpack.HotModuleReplacementPlugin(),
    new webpack.LoaderOptionsPlugin({
      debug: true,
    }),
  ],
  // https://github.com/chentsulin/webpack-target-electron-renderer#how-this-module-works
  target: 'electron-renderer',
});
webpack.config.electron.js:
/**
 * Build config for electron 'Main Process' file
 */
const webpack = require('webpack');
const merge = require('webpack-merge');
const baseConfig = require('./webpack.config.base');
const getReplacements = require('./app/app-info').getReplacements;

module.exports = merge(baseConfig, {
  devtool: 'source-map',
  entry: ['./app/main/index.ts'],
  // 'main.js' in root
  output: {
    path: __dirname,
    filename: './app/main/main.js',
  },
  plugins: [
    // Add source map support for stack traces in node
    // https://github.com/evanw/node-source-map-support
    // new webpack.BannerPlugin(
    //   'require("source-map-support").install();',
    //   { raw: true, entryOnly: false }
    // ),
  ],
  /**
   * Set target to Electron specific node.js env.
   * https://github.com/chentsulin/webpack-target-electron-renderer#how-this-module-works
   */
  target: 'electron-main',
  /**
   * Disables webpack processing of __dirname and __filename.
   * If you run the bundle in node.js it falls back to these values of node.js.
   * https://github.com/webpack/webpack/issues/2010
   */
  node: {
    __dirname: false,
    __filename: false
  },
});
As you see, I'm using a dev server for hot module replacement, so maybe that is the reason for this... I have a server.js which creates the server with the scripts, and then I use it from the main process. Here's server.js:
/**
 * Setup and run the development server for Hot-Module-Replacement
 * https://webpack.github.io/docs/hot-module-replacement-with-webpack.html
 */
const argv = require('minimist')(process.argv.slice(2));
const { spawn } = require('child_process');

async function createMiddleware(port, configPath) {
  const express = require('express');
  const webpack = require('webpack');
  const webpackDevMiddleware = require('webpack-dev-middleware');
  const webpackHotMiddleware = require('webpack-hot-middleware');
  const config = require(configPath);
  const app = express();
  const compiler = webpack(config);
  const PORT = process.env.PORT || port;

  const wdm = webpackDevMiddleware(compiler, {
    publicPath: config.output.publicPath,
    stats: {
      colors: true,
    },
  });

  app.use(wdm);
  app.use(webpackHotMiddleware(compiler));

  const server = app.listen(PORT, serverError => {
    if (serverError) {
      return console.error(serverError);
    }
    console.log(`Listening at http://localhost:${PORT}`);
  });

  process.on('SIGTERM', () => {
    console.log('Stopping dev server');
    wdm.close();
    server.close(() => {
      process.exit(0);
    });
  });
}

createMiddleware(3000, './webpack.config.development'); // A main renderer process
createMiddleware(3010, './webpack.config.server'); // A backend for communicating between renderer and remote server

if (argv['start-hot']) {
  spawn('npm', ['run', 'start-hot'], {
    shell: true,
    env: process.env,
    stdio: 'inherit',
  })
    .on('close', code => process.exit(code))
    .on('error', spawnError => console.error(spawnError));
}
In other words, I need to call the cudaDeviceQuery binary from the Electron renderer process. I'm using electron-builder, but it doesn't matter; I can switch to another builder.
There are two things. If you set __dirname: true in your webpack config, __dirname gives you the path of the file relative to your context directory.
If you set __dirname: false, then __dirname will contain the full path.
Development Mode
You have two options:
Set __dirname: true and concatenate it with process.cwd()
Set __dirname: false and use __dirname directly
Production Mode
Set __dirname: true and use process.cwd()
Set __dirname: true and use process.resourcesPath
I would prefer the second option as the approach in production.
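Putting that together, a minimal sketch of resolving the binary in both cases (it assumes the darwin/ and win/ folders are shipped as extraResources when packaged; the names and layout are illustrative, not taken from the question's build config):

const path = require('path');
const { app } = require('electron');

// In development the binaries sit next to the source (webpack's
// node: { __dirname: false } keeps __dirname meaningful); in a packaged
// app they are expected under process.resourcesPath.
function resolveCudaDeviceQueryPath() {
  const base = app.isPackaged ? process.resourcesPath : __dirname;
  if (process.platform === 'darwin') {
    return path.join(base, 'darwin', 'cudaDeviceQuery');
  }
  if (process.platform === 'win32') {
    return path.join(base, 'win', 'cudaDeviceQuery.exe');
  }
  throw new Error("Unfortunately your platform isn't yet supported");
}

With electron-builder, the folders would be listed under extraResources so they end up next to the packaged app's resources.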
Add this in package.json:
"scripts": {
  "start": "electron .",
  "install": "electron-rebuild",
  "package-osx": "electron-packager . Node-RED --platform=darwin --arch=x64 --out=build --overwrite",
  "package-mac": "electron-packager . --overwrite --platform=darwin --arch=x64 --prune=true --out=release-builds",
  "package-win": "electron-packager . electron-serialport --overwrite --asar=true --platform=win32 --arch=x64 --prune=true --out=release-builds --version-string.CompanyName=CE --version-string.FileDescription=CE --version-string.ProductName=\"CryptoApp\"",
  "package-linux": "electron-packager . electron-serialport --overwrite --asar=true --platform=linux --arch=x64 --prune=true --out=release-builds"
},
"dependencies": {
  "electron-packager": "^12.1.0",
  "electron-prebuilt": "^1.4.13"
}
In case it doesn't work for Windows, use the following:
"package-win": "electron-packager . electron-serialport --overwrite --asar=true --platform=win32 --arch=ia32 --prune=true --out=release-builds --version-string.CompanyName=CE --version-string.FileDescription=CE --version-string.ProductName=\"CryptoApp\"",
I am trying to set the initial state of my Redux store from the Node.js server. I am unsure what I missed and how to get it to work.
Here's my server.js
const express = require('express');
const path = require('path');
const webpack = require('webpack');
const webpackDevMiddleware = require('webpack-dev-middleware');
const webpackHotMiddleware = require('webpack-hot-middleware');
const webpackHotServerMiddleware = require('webpack-hot-server-middleware');
const config = require('./webpack/webpack.development.config.js');
const compiler = webpack(config);
const app = express();
app.use(webpackHotMiddleware(compiler.compilers.find(
  compiler => compiler.name === 'client'
)));
app.use(webpackHotServerMiddleware(compiler));
app.listen(3000);
Here's my webpack.development.config
const path = require('path');
module.exports = [
  {
    name: 'client',
    target: 'web',
    entry: './routes/client.jsx',
    output: {
      path: path.join(__dirname, 'assets'),
      filename: 'client.js',
      publicPath: '/assets/',
    },
    resolve: {
      extensions: ['.js', '.jsx']
    },
    devtool: 'source-map',
    module: {
      rules: [
        {
          test: /\.(js|jsx)$/,
          exclude: /(node_modules\/)/,
          use: [
            {
              loader: 'babel-loader',
            }
          ]
        },
        {
          test: /\.scss$/,
          use: [
            {
              loader: 'style-loader',
            },
            {
              loader: 'css-loader',
              options: {
                modules: true,
                importLoaders: 1,
                localIdentName: '[name]__[local]___[hash:base64:5]',
                sourceMap: true
              }
            },
            {
              loader: 'sass-loader'
            }
          ]
        },
        { test: /\.html$/, loader: "html-loader" }
      ],
    },
  },
];
PS: the same rules are there for the server config too, but I've removed it in an attempt to keep this short.
Here's the client.jsx
import React from 'react';
import { createStore } from 'redux'
import ReactDOM from 'react-dom';
import { BrowserRouter } from 'react-router-dom';
import Routes from './routes.jsx';
import { Provider } from 'react-redux';
import MainStore from '../views/store/MainStore';
import home from './home';
const store = createStore(MainStore, "Name");
ReactDOM.render((
  <Provider store={store}>
    <BrowserRouter>
      <Routes />
    </BrowserRouter>
  </Provider>
), document.getElementById('root'));
And finally my home.jsx
const express = require('express');
const path = require('path');
const webpack = require('webpack');
const webpackDevMiddleware = require('webpack-dev-middleware');
const webpackHotMiddleware = require('webpack-hot-middleware');
const webpackHotServerMiddleware = require('webpack-hot-server-middleware');
const config = require('../webpack/webpack.development.config.js');
const router = express();
router.get('/test', (req, res) => {
  res.json({ message: "I'm just testing to see if this works" });
});

const name = "Mona";

module.exports = {
  router: router,
  name: name,
};
I would like to import the const name from home.jsx into client.jsx and set the initial value of the store to this imported value. I could do it manually with the line
const store = createStore(MainStore, "Name");
in client.jsx. But instead of using "Name" as a string, I would like to replace it with the constant value in home.jsx.
Any suggestions?
The typical solution for this task is to use a preloaded state, computed during the SSR phase and injected into the rendered HTML template as a serialized field. Alternatively, you can create a dedicated resource, like /api/initialState.js, which builds the initial state for your view model depending on the client-side request and possibly JWT credentials.
You can see a similar solution in the resolve-scripts npm package, which provides a fully automatic isomorphic store for Redux on the server, including SSR.
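A minimal sketch of the preloaded-state pattern (the window.__PRELOADED_STATE__ name and the Express handler are illustrative, not taken from the question's code):

// server side (Express handler): compute the initial state and inline it into the HTML
app.get('/', (req, res) => {
  const preloadedState = { name: 'Mona' }; // e.g. derived from home.js
  const stateJson = JSON.stringify(preloadedState).replace(/</g, '\\u003c');
  res.send(`
    <!doctype html>
    <html>
      <body>
        <div id="root"></div>
        <script>window.__PRELOADED_STATE__ = ${stateJson}</script>
        <script src="/assets/client.js"></script>
      </body>
    </html>
  `);
});

// client.jsx: use the injected object as the store's initial state
const store = createStore(MainStore, window.__PRELOADED_STATE__);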
I just copy-pasted webpack-dev-server + express settings from webpack docs, but it doesn't work because the server can't find files to serve. What is wrong with this setup?
server.js
const express = require('express');
const webpack = require('webpack');
const webpackDevMiddleware = require('webpack-dev-middleware');
const app = express();
const config = require('../webpack.config.js');
const compiler = webpack(config);
app.use(webpackDevMiddleware(compiler, {
  publicPath: config.output.publicPath
}));

/* app.use('/', express.static('public')) — it works when used instead of webpack dev middleware */

// Serve the files on port 3000.
app.listen(3000, function () {
  console.log('Example app listening on port 3000!\n');
});
webpack.config.js
const path = require('path');
const webpack = require('webpack');
const scssRules = {
  test: /\.scss$/,
  use: ['style-loader', 'css-loader', 'sass-loader']
};

const jsRules = {
  test: /\.js$/,
  use: [
    { loader: 'babel-loader' }
  ]
};

module.exports = {
  entry: './js/App.js',
  output: {
    filename: 'bundle.js',
    path: path.resolve(__dirname, 'public'),
    publicPath: '/'
  },
  module: {
    rules: [
      jsRules,
      scssRules
    ]
  },
  devServer: {
    contentBase: './public'
  },
  plugins: [
    new webpack.HotModuleReplacementPlugin()
  ],
  devtool: 'inline-source-map'
}
I also came across this problem.
It turns out webpack-dev-middleware doesn't output any files; it serves them from memory, while the Express server expects physical files on disk.
Here, this plugin might help: https://github.com/gajus/write-file-webpack-plugin
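A sketch of wiring it into the question's webpack.config.js (following the plugin's documented default usage; only the plugins section changes):

const WriteFilePlugin = require('write-file-webpack-plugin');

// ...
plugins: [
  new webpack.HotModuleReplacementPlugin(),
  // Writes webpack-dev-middleware's in-memory output to disk (the 'public'
  // directory in this config), so express.static and other file readers can see it.
  new WriteFilePlugin()
],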