How to execute compiled code using the webpack node API?

Here's what I've got:
#!/usr/bin/env node
const webpack = require('webpack');
const config = require('./webpack.config');
const compiler = webpack(config);
const express = require('express');
const app = express();
const os = require('os');
const Chalk = require('chalk');
const Path = require('path');
const networkInterface = process.platform === 'win32' ? 'Ethernet' : 'eth0';
const ip = os.networkInterfaces()[networkInterface].find(x => x.family === 'IPv4').address;
const port = 3000;
// https://webpack.js.org/api/node/
// https://github.com/webpack/webpack-dev-middleware/blob/master/lib/index.js
// https://github.com/webpack/webpack-dev-middleware
const watcher = compiler.watch({
  aggregateTimeout: 250,
  poll: 50,
  ignored: /\bnode_modules\b/
}, (err, stats) => {
  const elapsed = stats.endTime - stats.startTime;
  console.log(`Recompiled in ${elapsed}ms`);
  // How to run the compiled JS ?
});
let interrupted = false;
process.on('SIGINT', () => {
  if (!interrupted) {
    interrupted = true;
    console.log('\b\bShutting down...');
    watcher.close(() => {
      console.log('Webpack watcher stopped.');
    });
    server.close(() => {
      console.log('Express server stopped.');
    });
  } else {
    console.log('\b\bForcing shut down');
    process.exit(2);
  }
});
const server = app.listen(port, '0.0.0.0', () => {
  console.log(`Listening on ${Chalk.blue(`http://${ip}:${port}`)}`);
});
When the watcher runs the callback, my JS should be ready to execute. How can I do that? There should be something in that stats object, but I'm not sure what to look for because it's huge.
For example, I can get the output filename like this:
const assetsByChunkName = stats.toJson().assetsByChunkName;
const outputPath = stats.toJson().outputPath;
const main = Path.join(outputPath, assetsByChunkName.main);
But it's not on disk. How do I read it using webpack's fake file system? Or do I even need to read it? Is the output source in memory somewhere? And I suppose I just run it through eval()?
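For reference, the compiler's output filesystem can be swapped for an in-memory one, which is what webpack-dev-middleware does internally. A rough sketch, assuming the memory-fs package (this wiring is an assumption, not part of the original setup), reusing the compiler and Path from above:
// Redirect webpack's output into memory, then read the bundle straight from there.
const MemoryFS = require('memory-fs'); // assumed dependency
const memFs = new MemoryFS();
compiler.outputFileSystem = memFs;

const watcher = compiler.watch({}, (err, stats) => {
  if (err || stats.hasErrors()) return;
  const { assetsByChunkName, outputPath } = stats.toJson();
  const main = Path.join(outputPath, assetsByChunkName.main);
  const source = memFs.readFileSync(main, 'utf8'); // the compiled bundle, never written to disk
  // ...evaluate `source` here
});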

I came up with something that seems to work:
const watcher = compiler.watch({
  aggregateTimeout: 250,
  poll: 50,
  ignored: /\bnode_modules\b/
}, (err, stats) => {
  if (err) {
    console.error(err);
    return;
  }
  console.log(stats.toString({
    chunks: false, // Makes the build much quieter
    colors: true, // Shows colors in the console
    stats: 'minimal',
  }));
  router = eval(stats.compilation.assets['main.js'].source()).default;
});
The only problem is that it exits whenever there is a syntax error in a source file.
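One way to avoid that (a sketch reusing the same compiler and router variables as above) is to check stats.hasErrors() before evaluating, and to wrap the eval in a try/catch so a broken build keeps the previous bundle running:
const watcher = compiler.watch({
  aggregateTimeout: 250,
  poll: 50,
  ignored: /\bnode_modules\b/
}, (err, stats) => {
  if (err) { // fatal webpack errors (bad configuration, etc.)
    console.error(err);
    return;
  }
  if (stats.hasErrors()) { // compilation errors, e.g. a syntax error in a source file
    console.error(stats.toString({ colors: true }));
    return; // keep serving the previously compiled bundle
  }
  try {
    router = eval(stats.compilation.assets['main.js'].source()).default;
  } catch (e) {
    console.error('Failed to evaluate the new bundle:', e);
  }
});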

Related

Question about Environment/Config Vars - Heroku and Postgres

I've managed to slowly grind my way through the curriculum and just completed the backend portion.
I decided to look into building some practice projects using Heroku…
If this is not the place to post this, please let me know.
While following this tutorial https://devcenter.heroku.com/articles/getting-started-with-nodejs, I get to the portion where we connect to the Postgres server (‘Provision a database’). I do this successfully, but I get sidetracked figuring out how to access/use this Postgres database locally when it is located and provided through Heroku. Please consider the following two methods:
Method #1
const cool = require("cool-ascii-faces");
const express = require("express");
const path = require("path");
const PORT = process.env.PORT || 5000;
const { Pool } = require("pg");
const pool = new Pool({
  connectionString: process.env.DATABASE_URL,
  ssl: {
    rejectUnauthorized: false,
  },
});
express()
  .use(express.static(path.join(__dirname, "public")))
  .set("views", path.join(__dirname, "views"))
  .set("view engine", "ejs")
  .get("/", (req, res) => res.render("pages/index"))
  .get("/db", async (req, res) => {
    try {
      const client = await pool.connect();
      const result = await client.query("SELECT * FROM test_table");
      const results = { results: result ? result.rows : null };
      res.render("pages/db", results);
      client.release();
    } catch (err) {
      console.error(err);
      res.send("Error " + err);
    }
  })
  .get("/cool", (req, res) => res.send(cool()))
  .get("/times", (req, res) => res.send(showTimes()))
  .listen(PORT, () => console.log(`Listening on ${PORT}`));
const showTimes = () => {
  let result = "";
  const times = process.env.TIMES || 5;
  for (let i = 0; i < times; i++) {
    console.log(i);
    result += i + " ";
  }
  return result;
};
(Screenshot: result from Method #1)
Method #2
Exactly the same code as Method #1, except I take the following steps before running: kill and reopen the terminal at the project location (/node-js-getting-started), then, using the credential information (User, Password, Host, Port, and Database) from the Heroku datastore resources tab, I run the following in order:
% export DATABASE_URL=postgres://User:Password#Host:Port/Database
% heroku local web <-- This runs the app at localhost:5000
(Screenshot: results from Method #2)
My question is: why does one method connect and the other doesn't?
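One way to narrow this down (a sketch, not part of the original post; the hypothetical check-db.js reuses the same Pool options as Method #1 and assumes DATABASE_URL is exported in the shell) is a standalone connection check run outside Express:
// check-db.js: run with `node check-db.js` after exporting DATABASE_URL
const { Pool } = require("pg");

const pool = new Pool({
  connectionString: process.env.DATABASE_URL,
  ssl: { rejectUnauthorized: false },
});

pool
  .query("SELECT NOW()")
  .then((result) => console.log("Connected:", result.rows[0]))
  .catch((err) => console.error("Connection failed:", err.message))
  .finally(() => pool.end());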

Teardown Errors when testing with Jest, Supertest, MongoDB, and Express

I'm testing my Express routes with Jest and Supertest. While setting up the app, I'm also connecting to MongoDB and adding the MongoClient to app.locals.
I'm getting an error which doesn't happen when I comment out my call to MongoClient.
ReferenceError: You are trying to `import` a file after the Jest environment has been torn down.
at BufferList.Readable (server/node_modules/readable-stream/lib/_stream_readable.js:179:22)
at BufferList.Duplex (server/node_modules/readable-stream/lib/_stream_duplex.js:67:12)
at new BufferList (server/node_modules/bl/bl.js:33:16)
at new MessageStream (server/node_modules/mongodb/lib/cmap/message_stream.js:35:21)
at new Connection (server/node_modules/mongodb/lib/cmap/connection.js:54:28)
/Users/zackchan/Documents/dev/server/node_modules/readable-stream/lib/_stream_readable.js:111
var isDuplex = stream instanceof Duplex;
^
TypeError: Right-hand side of 'instanceof' is not callable
at new ReadableState (/Users/zackchan/Documents/dev/server/node_modules/readable-stream/lib/_stream_readable.js:111:25)
at BufferList.Readable (/Users/zackchan/Documents/dev/server/node_modules/readable-stream/lib/_stream_readable.js:183:25)
at BufferList.Duplex (/Users/zackchan/Documents/dev/server/node_modules/readable-stream/lib/_stream_duplex.js:67:12)
at new BufferList (/Users/zackchan/Documents/dev/server/node_modules/bl/bl.js:33:16)
at new MessageStream (/Users/zackchan/Documents/dev/server/node_modules/mongodb/lib/cmap/message_stream.js:35:21)
at new Connection (/Users/zackchan/Documents/dev/server/node_modules/mongodb/lib/cmap/connection.js:54:28)
at /Users/zackchan/Documents/dev/server/node_modules/mongodb/lib/core/connection/connect.js:36:29
at callback (/Users/zackchan/Documents/dev/server/node_modules/mongodb/lib/core/connection/connect.js:280:5)
at TLSSocket.connectHandler (/Users/zackchan/Documents/dev/server/node_modules/mongodb/lib/core/connection/connect.js:325:5)
at Object.onceWrapper (events.js:421:28)
When I comment out my MongoClient call, I get this Jest warning:
Jest did not exit one second after the test run has completed.
This usually means that there are asynchronous operations that weren't stopped in your tests. Consider running Jest with `--detectOpenHandles` to troubleshoot this issue.
Here's my test script and app module
app.js
const https = require('https');
const fs = require('fs');
const path = require('path');
const dotenv = require('dotenv');
const express = require('express');
const rateLimiter = require('express-rate-limit');
const { MongoClient } = require('mongodb');
const app = express();
const port = process.env.PORT || 443;
const limit = rateLimiter({ window: 15 * 60 * 1000, max: 100 });
var httpsOptions;
if (process.env.NODE_ENV === 'development') {
  const rootCA = require('ssl-root-cas').create().addFile(path.join(__dirname, './cert/CA.pem'));
  https.globalAgent.options.ca = rootCA;
  httpsOptions = {
    key: fs.readFileSync(path.join(__dirname, './cert/localhost.key')),
    cert: fs.readFileSync(path.join(__dirname, './cert/localhost.crt'))
  };
}
MongoClient.connect(process.env.MONGODB_URI, { useNewUrlParser: true, useUnifiedTopology: true }, (err, mongoClient) => {
  if (err) throw err;
  app.locals.mongoClient = mongoClient;
});
app.use(limit);
app.use(express.json());
app.get('/', (req, res) => {
  res.json({path: '/'});
});
const server = https.createServer(httpsOptions, app).listen(port);
module.exports = app;
test.js
const dotenv = require('dotenv');
const path = require('path');
process.env = dotenv.config({path: path.resolve(__dirname, '.env')}).parsed;
const request = require('supertest');
const app = require('../app');
describe('GET /', () => {
  it('responds with path of /', (done) => {
    // app.locals is empty here
    request(app).get('/').expect(JSON.stringify({path: '/'}), done);
  });
});
I've tried closing the connection to MongoDB after the test case using app.locals.mongoClient.close() but mongoClient is undefined here. I've also tried wrapping the MongoClient.connect() call in an async function then calling it but that doesn't help either.
Anyone have thoughts on what I should try?
I fixed this by:
Following the instructions from the Jest docs. You can still access the MongoDB connection through req.app.locals if you assign it to app.locals in the test.js file.
Wrapping my MongoClient.connect() call in app.js in an environment check:
if (process.env.NODE_ENV === 'production') {
  MongoClient.connect(...)
}
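For completeness, a sketch of what the test file might look like after that change (the connection options mirror the question's app.js; the rest is an assumption, not the poster's exact code):
// test.js (sketch): the test owns the MongoDB connection lifecycle
const dotenv = require('dotenv');
const path = require('path');
process.env = dotenv.config({path: path.resolve(__dirname, '.env')}).parsed;
const request = require('supertest');
const { MongoClient } = require('mongodb');
const app = require('../app');

let mongoClient;

beforeAll(async () => {
  // connect here instead of in app.js, so the connection can be closed after the tests
  mongoClient = await MongoClient.connect(process.env.MONGODB_URI, { useNewUrlParser: true, useUnifiedTopology: true });
  app.locals.mongoClient = mongoClient; // routes still read it via req.app.locals
});

afterAll(async () => {
  await mongoClient.close(); // lets Jest exit cleanly
});

describe('GET /', () => {
  it('responds with path of /', (done) => {
    request(app).get('/').expect(JSON.stringify({path: '/'}), done);
  });
});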

nextJS / NodeJS keeps triggering ip_ban

So I'm not sure why, but my Node.js / Next.js app keeps triggering Plesk's ip_ban. I recoded our site and moved it away from React to Next.js, but it seems that by doing so there must be something making it look like an IP address is trying to connect too many times, which makes the Plesk server add the IP to a ban list (the rule that keeps getting triggered is recidive).
I am wondering what the correct way to solve this issue is. Disabling ip_ban is a temporary solution, but not the ideal one.
My server.js script is very simple and has no issues (once ip_ban is disabled):
const { createServer } = require("http");
const { parse } = require("url");
const next = require("next");
const dev = process.env.NODE_ENV !== "production";
const port = !dev ? process.env.PORT : 3000;
// Create the Express-Next App
const app = next({ dev });
const handle = app.getRequestHandler();
app
  .prepare()
  .then(() => {
    createServer((req, res) => {
      const parsedUrl = parse(req.url, true);
      const { pathname, query } = parsedUrl;
      handle(req, res, parsedUrl);
      console.log("pathname", pathname);
    }).listen(port, (err) => {
      if (err) throw err;
      console.log(`> Ready on http://example.com:${port}`);
    });
  })
  .catch((ex) => {
    console.error(ex.stack);
    process.exit(1);
  });
The rule that keeps getting triggered:
[recidive]
enabled = true
filter = recidive
action = iptables-allports[name=recidive]
logpath = /var/log/fail2ban.log
maxretry = 3

Localhost:3000 not opening from node.js app

I've set up a tiny little app which is run using Express, Nodemon and Mongoose.
A few hours ago it worked fine, and I don't think I've changed any of the code since then. If I have, it must have been accidental.
But whenever I try to access my localhost for this particular app, it doesn't load. It just sits there loading. No errors appear, the console is clear, and it states 'Running on Port 3000'.
If I try other apps, they work fine on localhost, so it must be the code - but I don't even know where to start, considering there are no error messages.
Here's my code:
const express = require("express");
const bodyParser = require("body-parser");
const mongoose = require("mongoose");
const Schema = mongoose.Schema;
var encrypt = require('mongoose-encryption');
require("dotenv").config();
const app = express();
mongoose.connect(process.env.MONGO_URI, {useNewUrlParser: true, useUnifiedTopology: true});
const SubmitDebtSchema = new Schema({
  balance: [{
    balanceDate: Date,
    newBalance: Number
  }],
  monthly: Number,
  date: String,
  autoBalance: String
});
const encKey = process.env.ENCKEY;
const sigKey = process.env.SIGKEY;
SubmitDebtSchema.plugin(encrypt, { encryptionKey: encKey, signingKey: sigKey, excludeFromEncryption: ['autoBalance'] });
const SubmitDebt = mongoose.model('submitdebt', SubmitDebtSchema);
app.get("/", async (req, res) => {
const debts = await SubmitDebt.find({ autoBalance: true });
debts.map(debt => {
console.log(debt.id);
const debtDate = parseInt(debt.date);
const finalDate = new Date(new Date().setDate(debtDate));
console.log(finalDate);
const todaysDate = new Date();
console.log(todaysDate);
const debtBalance = debt.balance[debt.balance.length - 1].newBalance;
console.log(debtBalance);
const debtRepayment = debt.monthly;
console.log(debtRepayment);
const updatedBalance = { balanceDate: new Date(), newBalance: debtBalance - debtRepayment };
console.log(updatedBalance);
if (todaysDate < finalDate) {
console.log("Balance shouldn't change.");
}
if (todaysDate >= finalDate) {
console.log("Balance should change");
SubmitDebt.findById(debt._id, (err, submitdebt) => {
if (!submitdebt) {
console.log("Unable to find entry to edit.");
}
else {
submitdebt.balance.push(updatedBalance);
submitdebt.save().then(submitdebt => {
console.log("Debt successfully updated.");
})
.catch(err => {
console.log("Debt unsuccessfully updated.");
});
}
});
}
res.send(debts);
});
});
const PORT = process.env.PORT || 3000;
app.listen(PORT, () => {
console.log(`Lemio is running on port ${ PORT }`);
});
Can anyone spot any reason why my localhost isn't working for this app? I feel like I'm missing something obvious!
Some thoughts:
kill the process and start it over;
check if your port is busy with some other app (see the sketch after the links below);
check if mongod is running.
Links:
How To Use ps, kill, and nice to Manage Processes in Linux
How to check if port is in use on Linux or Unix
Manage mongod Processes
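For the port check, a quick way to probe it from Node itself (a sketch; the hypothetical port-check.js assumes port 3000 from the question):
// port-check.js: reports whether anything is already listening on localhost:3000
const net = require("net");

const socket = net.connect({ port: 3000, host: "127.0.0.1" }, () => {
  console.log("Something is already listening on port 3000");
  socket.end();
});

socket.on("error", () => {
  console.log("Nothing is listening on port 3000");
});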

node.js gulp-livereload not refreshing in browser

I am new to Node.js. With the following code, the reload task is triggered when files in the dist folder are modified.
I have the LiveReload plugin activated in Chrome and the status is "live reload is connected", yet the browser does not refresh automatically.
Is anything missing in this code?
const path = require('path');
const gulp = require('gulp');
const nodemon = require('gulp-nodemon');
const es6Pipeline = require('gulp-webpack-es6-pipeline');
const less = require('gulp-less');
var webserver = require('gulp-webserver');
var livereload = require('gulp-livereload');
es6Pipeline.registerBuildGulpTasks(
  gulp,
  {
    entryPoints: {
      'bundle': path.join(__dirname, '/src/index.js')
    },
    outputDir: path.join(__dirname, 'public/generated/js')
  }
);
gulp.task('live-server', (done) => {
});
gulp.task('test-server', function () {
  var express = require('express'),
    serverport = 5000;
  var server = express();
  server.use(express.static('./dist'));
  server.listen(serverport);
  console.log("Server started at port: " + serverport);
  // Start live reload
  livereload({start: true});
  gulp.watch('dist/*.*', ['reload']);
});
gulp.task('reload', function () {
  console.log("refreshed");
  livereload();
});
gulp.task('generate', ['es6Pipeline:build:release']);
gulp.task('run-release', ['es6Pipeline:build:release', 'test-server']);
gulp.task('run-dev', ['es6Pipeline:build:dev', 'test-server']);
gulp.task('default', ['es6Pipeline:watch', 'test-server']);
Have you tried the following?
gulp.task('reload', function () {
  console.log("refreshed");
  livereload.listen();
});
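If that alone doesn't do it, the usual gulp-livereload wiring is to call livereload.listen() once when the web server starts and to notify it of the changed files from the watch task. A sketch in the same gulp 3 style, reusing the question's task names (the rest of the gulpfile is assumed unchanged):
gulp.task('test-server', function () {
  var express = require('express');
  var server = express();
  server.use(express.static('./dist'));
  server.listen(5000);
  livereload.listen(); // start the livereload server once, when the web server starts
  gulp.watch('dist/*.*', ['reload']);
});

gulp.task('reload', function () {
  console.log("refreshed");
  return gulp.src('dist/*.*')
    .pipe(livereload()); // tells the connected browser which files changed
});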
