I've decided to make the jump from building my websites with Slim, PHP, and Twig to using Node.js and Express.
To limit the shock of changing my whole process, I want to keep using Twig as the template engine instead of the default Jade.
I have changed the code to switch the render engine over to Twig, but now I am getting the following error message. Does anyone know what I am doing wrong? I am at a loss as to why it is not working, and any help would be appreciated.
Error message:
Error: Failed to lookup view "error" in views directory "/var/www/html/SocialTrackers/app/views"
at Function.render (/var/www/html/SocialTrackers/app/node_modules/express/lib/application.js:580:17)
at ServerResponse.render (/var/www/html/SocialTrackers/app/node_modules/express/lib/response.js:1008:7)
at /var/www/html/SocialTrackers/app/app.js:38:7
at Layer.handle_error (/var/www/html/SocialTrackers/app/node_modules/express/lib/router/layer.js:71:5)
at trim_prefix (/var/www/html/SocialTrackers/app/node_modules/express/lib/router/index.js:315:13)
at /var/www/html/SocialTrackers/app/node_modules/express/lib/router/index.js:284:7
at Function.process_params (/var/www/html/SocialTrackers/app/node_modules/express/lib/router/index.js:335:12)
at next (/var/www/html/SocialTrackers/app/node_modules/express/lib/router/index.js:275:10)
at Layer.handle_error (/var/www/html/SocialTrackers/app/node_modules/express/lib/router/layer.js:67:12)
at trim_prefix (/var/www/html/SocialTrackers/app/node_modules/express/lib/router/index.js:315:13)
This is the code I am currently running. Other than the changes needed to switch over to using Twig as the render engine, I have made no other changes to the initial Express install.
app.js
var createError = require('http-errors');
var twig = require("twig");
var express = require('express');
var path = require('path');
var cookieParser = require('cookie-parser');
var logger = require('morgan');
var indexRouter = require('./routes/index');
var usersRouter = require('./routes/users');
var app = express();
// view engine setup
app.set('views', path.join(__dirname, './views'));
app.set('view engine', 'twig');
app.use(logger('dev'));
app.use(express.json());
app.use(express.urlencoded({ extended: false }));
app.use(cookieParser());
app.use(express.static(path.join(__dirname, 'public')));
app.use('/bootstrap', express.static(__dirname + '/node_modules/bootstrap/dist/'))
app.use('/', indexRouter);
app.use('/users', usersRouter);
// catch 404 and forward to error handler
app.use(function(req, res, next) {
next(createError(404));
});
// error handler
app.use(function(err, req, res, next) {
// set locals, only providing error in development
res.locals.message = err.message;
res.locals.error = req.app.get('env') === 'development' ? err : {};
// render the error page
res.status(err.status || 500);
res.render('error');
});
module.exports = app;
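For what it's worth, Express resolves a view engine by requiring the package named in 'view engine' and calling its __express export, which is why the require("twig") variable above is never used directly. The registration can also be made explicit when debugging the lookup; a minimal sketch, assuming the installed twig build exposes the conventional __express hook:
// Optional explicit registration; does the same thing as Express's implicit lookup
app.engine('twig', twig.__express); // assumes twig exports the standard __express render function
app.set('view engine', 'twig');
app.set('views', path.join(__dirname, 'views'));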
Additionally, I am using the following file structure (the default Express layout):
app
--bin
--node_modules
--public
--routes
----index.js
----users.js
--views
----error.twig
----index.twig
----landingpage.twig
--app.js
Took me a few days to figure this one out.
As I said, I'm new to Node.js and Express, so I believe I made a very rookie mistake. I thought I would post the solution in case anyone else has this issue.
I was running the Node server on an EC2 instance. It turns out that after updating the code I also had to restart the Node server.
I'm using pm2 to keep the server running all the time, so for me the solution was to run the following command:
pm2 restart [namespace of server]
Note: I never gave the server a namespace, so for me it was simply 'default' and the command became the one below. If you don't know the namespace, you can find it by running 'pm2 list'.
pm2 restart default
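As a side note, pm2 can also read a plain JavaScript ecosystem file, which lets you give the process a memorable name (or have pm2 watch for file changes) instead of relying on the default namespace. A minimal sketch, with a made-up process name and the Express generator's ./bin/www entry script:
// ecosystem.config.js - minimal sketch; 'socialtrackers' is a made-up name, pick your own
module.exports = {
  apps: [
    {
      name: 'socialtrackers',
      script: './bin/www',
      // set watch: true if you want pm2 to restart automatically when files change
      watch: false
    }
  ]
};
Start it with pm2 start ecosystem.config.js, after which pm2 restart socialtrackers does the same job as pm2 restart default above.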
Related
I have set up a JanusGraph Docker instance (janusgraph/janusgraph:latest - Lucene/BerkeleyDB/TinkerPop/Gremlin Server) in a Linux VM in my datacentre. On the same VM and on my laptop, I've run the same JanusGraph Docker image but started the Gremlin console instead (adjusting conf/remote.yaml to point to the Gremlin Server), and successfully imported air-routes.graphml into the server instance. I can also successfully query the air-routes data with some simple traversals.
In summary: I'm confident that the server is running, has data, can be connected to remotely, and responds to traversals.
I have been stuck on the next step for a long, long time:
I have created a basic Node express stub:
var createError = require('http-errors');
var express = require('express');
var path = require('path');
var cookieParser = require('cookie-parser');
var logger = require('morgan');
var gremlin = require('gremlin');
const DriverRemoteConnection = gremlin.driver.DriverRemoteConnection;
const traversal = gremlin.process.AnonymousTraversalSource.traversal;
var indexRouter = require('./routes/index');
var usersRouter = require('./routes/users');
var app = express();
// view engine setup
app.set('views', path.join(__dirname, 'views'));
app.set('view engine', 'pug');
app.use(logger('dev'));
app.use(express.json());
app.use(express.urlencoded({ extended: false }));
app.use(cookieParser());
app.use(express.static(path.join(__dirname, 'public')));
app.use('/', indexRouter);
app.use('/users', usersRouter);
// catch 404 and forward to error handler
app.use(function(req, res, next) {
next(createError(404));
});
// error handler
app.use(function(err, req, res, next) {
// set locals, only providing error in development
res.locals.message = err.message;
res.locals.error = req.app.get('env') === 'development' ? err : {};
// render the error page
res.status(err.status || 500);
res.render('error');
});
module.exports = app;
const g = traversal().withRemote(
new DriverRemoteConnection('ws://mygremlinserverhost:8182/gremlin'));
This fails with the following:
npm run start
greg:air-routes/ $ npm run start [21:07:39]
Debugger attached.
> air-routes#0.0.0 start
> node ./bin/www
Debugger attached.
Waiting for the debugger to disconnect...
node:internal/process/promises:227
triggerUncaughtException(err, true /* fromPromise */);
^
Error: connect ECONNREFUSED <The IP of the VM>:8182
at TCPConnectWrap.afterConnect [as oncomplete] (node:net:1138:16) {
errno: -61,
code: 'ECONNREFUSED',
syscall: 'connect',
address: '<The IP of the VM>',
port: 8182
}
Waiting for the debugger to disconnect...
greg:air-routes/ $
(Also, I don't understand this notification, or whether it's the cause or the effect of the inability to connect. What am I doing wrong, or not doing? Please assume an idiot novice who needs the basics explained. Thanks, I think? ;o) )
node:internal/process/promises:227
triggerUncaughtException(err, true /* fromPromise */);
^
I believe the inability to connect was because I had not specified a Docker port mapping to the host port in the server's docker run command.
It should have been (with the port-mapping switch -p):
docker run --name janusgraph-default -p 8182:8182 --volume /myLocalDir/gremlin:/dataImports janusgraph/janusgraph:latest
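Once the port is published, a quick way to confirm the driver can actually reach the server is to run a single cheap traversal inside a try/catch; this also stops a refused connection from surfacing as the triggerUncaughtException unhandled-rejection notice quoted above. A minimal sketch, reusing the hostname placeholder from the question:
const gremlin = require('gremlin');
const { DriverRemoteConnection } = gremlin.driver;
const { traversal } = gremlin.process.AnonymousTraversalSource;

async function smokeTest() {
  const dc = new DriverRemoteConnection('ws://mygremlinserverhost:8182/gremlin');
  const g = traversal().withRemote(dc);
  try {
    // any cheap traversal will do; this just counts vertices
    const result = await g.V().count().next();
    console.log('connected, vertex count:', result.value);
  } catch (err) {
    console.error('gremlin connection failed:', err.message);
  } finally {
    await dc.close();
  }
}

smokeTest();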
I generated a demo express server using these steps:
npm install -g express-generator
express myExpressApp --view pug
Needless to say, the app runs fine on my local machine (npm start).
I then pushed the code to Cloud Source Repositories,
then deployed it to Google Cloud Functions through their web console.
What am I missing?
Source code of my generated demo app:
/myExpressApp/app.js
var createError = require('http-errors');
var express = require('express');
var path = require('path');
var cookieParser = require('cookie-parser');
var logger = require('morgan');
var indexRouter = require('./routes/index');
var usersRouter = require('./routes/users');
var app = express();
// view engine setup
app.set('views', path.join(__dirname, 'views'));
app.set('view engine', 'pug');
app.use(logger('dev'));
app.use(express.json());
app.use(express.urlencoded({ extended: false }));
app.use(cookieParser());
app.use(express.static(path.join(__dirname, 'public')));
app.use('/', indexRouter);
app.use('/users', usersRouter);
// catch 404 and forward to error handler
app.use(function(req, res, next) {
next(createError(404));
});
// error handler
app.use(function(err, req, res, next) {
// set locals, only providing error in development
res.locals.message = err.message;
res.locals.error = req.app.get('env') === 'development' ? err : {};
// render the error page
res.status(err.status || 500);
res.render('error');
});
module.exports = app;
/myExpressApp/routes/index.js
var express = require('express');
var router = express.Router();
/* GET home page. */
router.get('/', function(req, res, next) {
res.render('index', { title: 'Express' });
});
module.exports = router;
Final updates
I added the following as suggested by Abrar Hossain
app.js
//...
module.exports = app;
exports.app = functions.http.onRequest(app);
module.exports = { app };
package.json
"scripts": {
"start": "functions-framework --target=company"
},
"dependencies": {
"#google-cloud/functions-framework": "^1.7.1",
But the issue persists.
The entry point is set as "app".
In short, during deployment you have to specify the name of the JS function you want to use as the GCP Cloud Function; it is specified with the --entry-point flag. More information is in the gcloud functions deploy reference (please do look at the flags section). In your case that name is app. If you provide the deployment script, I can point you to the exact spot.
Long answer: a common misconception is that a GCP Cloud Function = an Express app. It is not, although under the hood it might use Express, and you might even request certain behavior via configuration. In reality, for GCP it is just a JS function, nothing more and nothing less; no routers or anything like that. If you want to test it locally, use the Functions Framework. You can get further details in my post.
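To make that concrete, here is a minimal sketch of an entry-point file under those assumptions; the name company comes from the --target flag in the question, while the file name and layout are assumptions:
// index.js - minimal sketch, not the project's actual code
// The generated app.js already does module.exports = app, so it can be reused directly
const app = require('./app');

// A GCF HTTP function is just a (req, res) handler, and an Express app is callable as one,
// so exporting it under the entry-point name is all the wrapper needs
exports.company = app;
The exported name has to match whatever you pass to --entry-point (gcloud) or --target (functions-framework).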
Export your app like this:
module.exports = { app };
And add your app.js entry to package.json:
"main": "app.js",
In my case I had Firebase Functions set up by the Firebase CLI with the TypeScript option, and I found not only this issue but some others that were preventing deployment or execution. The only way I got it working was by completely removing the functions folder and running the Firebase CLI functions setup again (firebase init functions), this time choosing the JavaScript option.
A few other things to keep in mind:
Use CommonJS imports instead of ES6, e.g. const gcipCloudFunctions = require("gcip-cloud-functions")
Run npm install in your functions folder if you haven't already
Deploy your Firebase functions before you deploy your gcloud one: firebase deploy --only functions
Deploy your gcloud function: gcloud functions deploy <your function name> --trigger-http --runtime=nodejs14 --verbosity=debug
If the deploy works but the function doesn't run, have a look at the runtime logs by running gcloud functions logs read <your function name> or via the Cloud Console UI
I kept running the deploy script from the wrong directory and this happened, so the easy fix was to actually cd into the project directory.
It must be a pretty silly error, because I didn't find much advice online. But I guess until you know how to interpret the feedback, it's a bit of a mystery. I'm a newbie at the Cloud Console: I started a tutorial one day, came back to it the next (after reopening the website), opened the terminal to deploy, and didn't notice that I was in the /home/${USER} dir and not the project dir.
I have a pretty simple Node.js project that uses Express. When I start this project locally, I have noticed that something sends a POST to /inform about every 30 seconds. I'd like to know what's calling /inform and what its purpose is.
I'm new to Node. Is this normal? I haven't implemented a route for this call, so it causes a 404.
Here's my main app:
const createError = require('http-errors');
const express = require('express');
const path = require('path');
const cookieParser = require('cookie-parser');
const logger = require('morgan');
const fileUpload = require('express-fileupload');
const app = express();
// view engine setup
app.set('views', path.join(__dirname, 'views'));
app.set('view engine', 'jade');
app.use(logger('dev'));
app.use(express.json());
app.use(express.urlencoded({ extended: false }));
app.use(cookieParser());
app.use(express.static(path.join(__dirname, 'public')));
app.use(fileUpload());
// routes
const indexRouter = require('./routes/index');
const usersRouter = require('./routes/users');
app.use('/', indexRouter);
app.use('/users', usersRouter);
// catch 404 and forward to error handler
app.use((req, res, next) => {
console.log(req)
next(createError(404));
});
// error handler
app.use((err, req, res, next) => {
// set locals, only providing error in development
res.locals.message = err.message;
res.locals.error = req.app.get('env') === 'development' ? err : {};
// render the error page
res.status(err.status || 500);
res.render('error');
});
// Start the server
const PORT = process.env.PORT || 8080;
app.listen(PORT, () => {
console.log(`App listening on port ${PORT}`);
console.log('Press Ctrl+C to quit.');
});
In my console, I see this about every 30 seconds:
POST /inform 404 14.002 ms - 2158
POST /inform 404 13.910 ms - 2158
POST /inform 404 31.536 ms - 2158
EDIT:
Thank you for the comments. I changed my Express port to 8000 and it no longer happens. So something on my local machine is repeatedly posting to localhost:8080/inform. I'll have to track it down.
I have a Ubiquiti UniFi network stack at home. After running (and stopping) the UniFi Controller on my laptop, all my UniFi devices continue to send POST requests to <laptop IP>:8080/inform.
My own application was getting its log filled up with the same unknown-route "/inform" errors.
Solutions:
Choose a different port
Bind your application to 'localhost' instead of '0.0.0.0' (see the sketch after this list)
Get a dedicated controller device, like a Raspberry Pi or Unifi Cloud Key
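A minimal sketch of the second option, adapted to the listen() call from the question; bound to the loopback interface, the port is no longer reachable from other devices on the LAN, so the UniFi gear stops hitting it:
// Only listen on 127.0.0.1 instead of every interface
const PORT = process.env.PORT || 8080;
app.listen(PORT, '127.0.0.1', () => {
  console.log(`App listening on http://127.0.0.1:${PORT}`);
});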
This could be pretty much any application on your PC or even other systems in your local network, if you are listening on public addresses.
You could implement the route to see if the request headers/body give any hint as to where the requests are coming from.
You could also use external software such as Wireshark to monitor network calls to localhost, including their source.
Otherwise, use a different port that nothing is sending periodic requests to.
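A minimal sketch of the route-inspection idea; register it before the 404 handler so it actually runs, and the source IP plus User-Agent header will usually identify the caller:
// Temporary diagnostic route: log who is POSTing to /inform, then answer 404 as before
app.post('/inform', (req, res) => {
  console.log('POST /inform from', req.ip, 'user-agent:', req.get('user-agent'));
  res.sendStatus(404);
});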
*** Updated with new errors,
After running
webpack --display-error-details
I get a more detailed error. Please note that it appears to be looking for node_modules in directories above my project.
Module not found: Error: Can't resolve 'html' in
'/Users/wuno/Dropbox/google/devops/htdocs/riverwalk' BREAKING CHANGE:
It's no longer allowed to omit the '-loader' prefix when using
loaders.
You need to specify 'html-loader' instead of 'html'. resolve 'html' in '/Users/wuno/Dropbox/google/devops/htdocs/riverwalk'
Parsed request is a module using description file:
/Users/wuno/Dropbox/google/devops/htdocs/riverwalk/package.json
(relative path: .) after using description file:
/Users/wuno/Dropbox/google/devops/htdocs/riverwalk/package.json
(relative path: .)
resolve as module
/Users/wuno/Dropbox/google/devops/htdocs/node_modules doesn't exist or is not a directory
/Users/wuno/Dropbox/google/devops/node_modules doesn't exist or is not a directory
/Users/wuno/Dropbox/google/node_modules doesn't exist or is not a directory
/Users/wuno/Dropbox/node_modules doesn't exist or is not a directory
/Users/wuno/node_modules doesn't exist or is not a directory
/Users/node_modules doesn't exist or is not a directory
/node_modules doesn't exist or is not a directory
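That BREAKING CHANGE message is webpack 2 dropping the implicit '-loader' suffix, so any rule that still refers to the loader as plain 'html' has to spell out html-loader. The rule lives in webpack.config.common.js, which isn't shown below; a minimal sketch of what it presumably needs to look like, assuming a webpack 2-style config and that html-loader is installed:
// webpack.config.common.js (excerpt) - minimal sketch, not the project's actual config
module.exports = {
  module: {
    rules: [
      {
        test: /\.html$/,
        use: 'html-loader' // previously just 'html'; the full name is now required
      }
    ]
  }
};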
I have been working on an Angular 2 Express app for a few months and have dealt with the usual cases of webpack breaking for no apparent reason. Normally I delete the node_modules folder and run npm install, which fixes any issues. This time, without changing anything, webpack will not compile.
I pushed my project to git and everything was working great. I shut my computer down to go to a meeting. Once I opened everything back up and ran webpack, I got an error stating Can't resolve 'html' in... This error appears for every component in my project, one for each separate view. This is the exact error for the first component it throws on:
ERROR in ./assets/app/layouts/public.component.ts
Module not found: Error: Can't resolve 'html' in '/Users/wuno/Dropbox/google/devops/htdocs/mypp'
# ./assets/app/layouts/public.component.ts 25:18-52
# ./assets/app/app.module.ts
# ./assets/app/main.ts
I am at a loss for how to proceed here. I even cloned my git repo at my last working push and it throws the same errors when running webpack.
This is my webpack config file,
var webpackMerge = require('webpack-merge');
var commonConfig = require('./webpack.config.common.js');
module.exports = webpackMerge(commonConfig, {
devtool: 'cheap-module-eval-source-map',
output: {
path: './public/js/app',
publicPath: "/js/app/",
filename: 'bundle.js',
chunkFilename: '[id].chunk.js'
}
});
Angular components are at
/assets/app/
index.html is at,
/public/index.html
This is my app.js in the root directory,
var express = require('express');
var models = require('./models/');
var path = require('path');
var favicon = require('serve-favicon');
var logger = require('morgan');
var cookieParser = require('cookie-parser');
var bodyParser = require('body-parser');
var appRoutes = require('./routes/index');
// api controllers
var authors = require('./controllers/authors');
var books = require('./controllers/books');
var address = require('./controllers/address');
var app = express();
// view engine setup
app.set('views', path.join(__dirname, 'public'));
app.set('view engine', 'hbs');
app.use(favicon(path.join(__dirname, 'public', 'favicon.ico')));
app.use(logger('dev'));
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({extended: false}));
app.use(cookieParser());
app.use(express.static(path.join(__dirname, '/public')));
app.use(function (req, res, next) {
res.setHeader('Access-Control-Allow-Origin', '*');
res.setHeader('Access-Control-Allow-Headers', 'Origin, X-Requested-With, Content-Type, Accept');
res.setHeader('Access-Control-Allow-Methods', 'POST, GET, PATCH, DELETE, OPTIONS');
next();
});
// Route app to index.html and hand off to angular2
app.use('/', appRoutes);
// catch-all: render index.html and let Angular handle routing on the client
app.use(function (req, res, next) {
return res.render('index');
});
models.sequelize
.authenticate()
.then(function () {
console.log('Connection successful');
})
.catch(function(error) {
console.log("Error creating connection:", error);
});
module.exports = app;
Any help would be appreciated. At the very least, how can I debug this further?
I'm experimenting with Angular 2 and, following the quick-start guide in the official documentation, I'm definitely up and running. However, if I want to add any APIs on the server or host it in the cloud, it seems I'll need to use Node. I think I have everything set correctly in the server.js file, yet when I run it, it seems like it's not loading everything from SystemJS and I get the following errors:
Here is the Node code:
var express = require("express");
var bodyParser = require("body-parser");
var app = express();
var System = require('systemjs');
// loads './app.js' from the current directory
System.import('app').then(function(m) {
console.log(m);
});
// Config
app.set('port', (process.env.PORT || 3000));
app.use('/app', express.static(__dirname + '/app'));
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: true }));
app.listen(app.get('port'), function() {
console.log('MEAN app listening on port ' + app.get('port'));
});
app.get('*', function(req, res) {
res.sendFile(__dirname + '/index.html');
});
I'm not sure what I'm missing on the Node side that lite-server provides when it is launched via npm start, as in the Angular 2 quick-start guide.
When you are telling Express where to look for static files, you have to include the locations of your JS files as well.
For example, in one of my projects I have it like so:
app.use('/css', express.static(path.resolve(appPath, 'css')));
app.use('/lib/css', express.static(path.resolve(appPath + '/lib', 'css')));
app.use('/lib/js', express.static(path.resolve(appPath + '/lib', 'js')));
app.use('/assets', express.static(path.resolve(appPath, 'assets')));
app.use('/node_modules', express.static(path.resolve(appPath, 'node_modules')));
app.use('/app', express.static(path.resolve(appPath, 'app')));
I believe that might be your issue, or at least it should hopefully set you on the right path.
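Applied to the server.js in the question, that roughly means exposing node_modules over HTTP (so SystemJS can fetch its own files and the Angular bundles) alongside the existing /app mount. A minimal sketch, assuming index.html sits next to server.js as in the question:
// Static mounts the SystemJS-based quickstart needs at minimum
app.use('/app', express.static(__dirname + '/app'));
app.use('/node_modules', express.static(__dirname + '/node_modules'));

// Everything else falls through to index.html, which then boots SystemJS/Angular in the browser
app.get('*', function (req, res) {
  res.sendFile(__dirname + '/index.html');
});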