I've been developing this NodeJS application that takes advantage of Laravel Echo's functionalities to receive information from a server through socket connection.
Server-side
Laravel Echo Server with Laravel 5.7.19
Client-side
"laravel-echo": "^1.5.2"
"socket.io": "^2.2.0"
import Echo from '../../node_modules/laravel-echo/dist/echo.common.js'
import Socketio from 'socket.io-client';
// Create a Laravel Echo instance that connects to the echo server over
// socket.io, then subscribe to a private per-device channel.
let echo = new Echo({
broadcaster: 'socket.io',
// Full HTTPS origin of laravel-echo-server (port 8080 here).
host: 'https://smartfish.danymota.com:8080/',
encrypted: true,
secure: true,
// Pass the socket.io-client constructor explicitly (needed outside a browser,
// where Echo cannot pick up a global `io`).
client: Socketio,
auth: {
headers: {
// Bearer token forwarded to the authEndpoint for private-channel auth.
// NOTE(review): `this` at module top level is undefined in an ES module —
// confirm this snippet actually runs inside a class/method with `this.token` set.
'Authorization': 'Bearer ' + this.token.bearerToken,
},
},
});
// Listen for RulesUpdated events on the device-specific private channel.
echo.private('central.' + macAddress)
.listen('RulesUpdated', (response) => {
// `aquarios` arrives as a JSON string; decode before handing off.
handleRules(JSON.parse(response.aquarios))
console.log(new Date().toLocaleString() + " - Rules updated")
})
Problem
Everything works fine in Http, when I switch to HTTPS it just stops working. Also, the socket connection doesn't reach the server (or at least Laravel-echo-server doesn't log it)
IMPORTANT - What I've tried
Ran the application through Browserify, and then on the browser (it works just fine on a browser, even with HTTPS)
Played around with different ports (Again, it works with HTTP, so ports are likely not the problem)
Changed the URL to wss://, /socket.io
Forced socket.io to include a secure: true on options
Changed the version of Laravel Echo
Tried importing both echo.common.js and echo.js
Notes
/api/broadcasting/auth - This is working, so problem is likely not here
Laravel echo server configuration
{
"authHost": "https://smartfish.danymota.com",
"authEndpoint": "/api/broadcasting/auth",
"clients": [{
"appId": "f7506b5e7118092c",
"key": "9015d93999f3a2f7f95a054a76fbcbfd"
}],
"database": "redis",
"databaseConfig": {
"redis": {},
"sqlite": {
"databasePath": "/database/laravel-echo-server.sqlite"
}
},
"devMode": true,
"host": null,
"port": "8080",
"protocol": "https",
"socketio": {},
"sslCertPath": "/home/danymota/ssl/cert/smartfish.danymota.com.crt",
"sslKeyPath": "/home/danymota/ssl/private/smartfish.danymota.com.key",
"sslCertChainPath": "",
"sslPassphrase": "",
"subscribers": {
"http": true,
"redis": true
},
"apiOriginAllow": {
"allowCors": true,
"allowOrigin": "http://smartfishweb.test/api",
"allowMethods": "GET, POST",
"allowHeaders": "Origin, Content-Type, X-Auth-Token, X-Requested-With, Accept, Authorization, X-CSRF-TOKEN, X-Socket-Id"
}
}
Socket.io debug
socket.io-client:url parse https://smartfish.danymota.com:8080/socket.io +0ms
socket.io-client new io instance for https://smartfish.danymota.com:8080/socket.io +0ms
socket.io-client:manager readyState closed +0ms
socket.io-client:manager opening https://smartfish.danymota.com:8080/socket.io +0ms
socket.io-client:manager connect attempt will timeout after 20000 +4ms
socket.io-client:manager readyState opening +1ms
socket.io-client:manager connect_error +60ms
socket.io-client:manager cleanup +0ms
Thank you all in advance.
That's because you have to configure the Laravel Echo Server settings.
Use the command `laravel-echo-server init` and choose https when setting the protocol,
or open laravel-echo-server.json
and change protocol to https
{
"authHost": "https://smartfish.danymota.com:8080",
"authEndpoint": "/broadcasting/auth",
"clients": [
{
"appId": "It generates it from the command init",
"key": "It generates it from the command init"
}
],
"database": "Your database driver",
"databaseConfig": {
"redis": {},
"sqlite": {
"databasePath": "/database/laravel-echo-server.sqlite"
}
},
"devMode": true,
"host": null,
"port": "6001", // your node js port the default is 6001
"protocol": "https", // change it here
"socketio": {},
"sslCertPath": "",
"sslKeyPath": "",
"sslCertChainPath": "",
"sslPassphrase": "",
"apiOriginAllow": {
"allowCors": true,
"allowOrigin": "Your domain with the port",
"allowMethods": "GET, POST",
"allowHeaders": "Origin, Content-Type, X-Auth-Token, X-Requested-With, Accept, Authorization, X-CSRF-TOKEN, X-Socket-Id"
}
}
I've solved this problem by adding a flag rejectUnauthorized: false to laravel echo.
// Echo client that tolerates an invalid/self-signed TLS certificate.
this.echo = new Echo({
broadcaster: 'socket.io',
host: config.ECHO_SERVER,
client: Socketio,
// Disables TLS certificate verification on the Node socket connection.
// WARNING: this accepts self-signed/invalid certs and removes MITM
// protection — acceptable as a workaround, not for production.
rejectUnauthorized: false,
auth: {
headers: {
// Bearer token used by the echo server's authEndpoint.
'Authorization': 'Bearer ' + this.token.bearerToken,
},
},
})
Related
Deploy my api project on vercel, but have cors error, when I do http request from a client on Next.js.
My Nest configuration is like in docs.
const app = await NestFactory.create(AppModule, { cors: true });
But I also tried different configurations
// Attempt 1: explicit origin whitelist with credentials.
app.enableCors({
origin: ['http://localhost:3000', 'https://api-filmgen-pearl.vercel.app'],
methods: ['GET', 'POST'],
credentials: true,
});
//or
// Attempt 2: allow everything (default CORS settings).
app.enableCors();
//or
// Attempt 3: regex-based origin matching.
app.enableCors({
credentials: true,
origin: [/localhost:\d+$/, /\.vercel\.app$/, /\.herokuapp\.com$/],
// NOTE(review): 'application/json' is a MIME type, not a request header
// name — it does not belong in allowedHeaders (likely meant only
// 'content-type'). Browsers may reject the preflight because of it.
allowedHeaders: 'origin, content-type, accept, application/json',
});
In Postman, Swagger, or locally everything is OK.
UPD: My Next.js config
/** @type {import('next').NextConfig} */
const { i18n } = require('./next-i18next.config');

const nextConfig = {
  webpack(config) {
    // Allow SVG files to be imported as React components via SVGR.
    config.module.rules.push({
      test: /\.svg$/i,
      issuer: /\.[jt]sx?$/,
      // Fixed: the loader package is '@svgr/webpack' — '#svgr/webpack'
      // is not a valid npm specifier (the '@' was mangled into '#').
      use: ['@svgr/webpack'],
    });
    return config;
  },
  reactStrictMode: true,
  compiler: {
    styledComponents: true,
  },
  i18n,
};

module.exports = nextConfig;
In the React client app, requests are also failing.
Since Heroku is a separate domain than Vercel which is also https, use the secure: true and sameSite: 'none' flags in your CORS config.
Next.js mentions in their Caveats section that requests are by-default same-origin. They also provide a guide on how to customize route CORS behavior in the CORS Request Helpers guide.
Add options to vercel config
{
"version": 2,
"builds": [
{
"src": "src/main.ts",
"use": "@vercel/node"
}
],
"routes": [
{
"src": "/(.*)",
"dest": "src/main.ts",
"methods": ["GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS"]
}
]
}
I am trying to upload a node app online on aws.
When I launch the app on local it works perfectly fine because my app finds access to postgres.
However when I upload my server, it then can't connect to the database.
My app uses loopback.io.
Here is the server/config.json :
{
"restApiRoot": "/api",
"host": "0.0.0.0",
"port": 3000,
"remoting": {
"context": false,
"rest": {
"handleErrors": false,
"normalizeHttpPath": false,
"xml": false
},
"json": {
"strict": false,
"limit": "100kb"
},
"urlencoded": {
"extended": true,
"limit": "100kb"
},
"cors": false
},
"legacyExplorer": false,
"logoutSessionsOnSensitiveChanges": true
}
And here is /server/datasources.json
{
"db": {
"name": "db",
"connector": "memory"
},
"postgres": {
"host": "localhost",
"port": 5432,
"url": "",
"database": "postgres",
"password": "postgresseason",
"name": "postgres",
"user": "postgres",
"connector": "postgresql"
}
}
I have done some research and I think I have to change a URL so it doesn't look for a "local" connection, but I can't manage to make it work.
I tried using the URL postgres://postgres:postgresseason@db:5432/postgres without success.
The error I am getting are either :
Web server listening at: http://0.0.0.0:8080
Browse your REST API at http://0.0.0.0:8080/explorer
Connection fails: Error: getaddrinfo ENOTFOUND db db:5432
It will be retried for the next request.
Or :
Web server listening at: http://0.0.0.0:3000
Browse your REST API at http://0.0.0.0:3000/explorer
Connection fails: Error: connect ECONNREFUSED 127.0.0.1:5432
It will be retried for the next request.
Any help how to make it work?
Thanks
You need to make sure the postgres server is installed and reachable by aws.
By default it cannot reach your locally installed postgres (without complicated port forwarding, etc.)
If you are using ec2 you can install a postgres server locally and use localhost.
Or setting postgres in another aws service like this one: https://aws.amazon.com/rds/postgresql/
Just make sure the nodejs server / service has the required permissions to reach and query the postgres.
I've installed both Laravel echo server and Laravel echo client.
Following is the laravel-echo-server.json configuration.
{
"authHost": "http://taxation.com",
"authEndpoint": "/broadcasting/auth",
"clients": [
{
"appId": "APP_ID",
"key": "someKey"
}
],
"database": "redis",
"databaseConfig": {
"redis": {},
"sqlite": {
"databasePath": "/database/laravel-echo-server.sqlite"
}
},
"devMode": true,
"host": "127.0.0.1",
"port": "3000",
"protocol": "http",
"socketio": {},
"sslCertPath": "",
"sslKeyPath": "",
"sslCertChainPath": "",
"sslPassphrase": ""
}
The following script listens for channel events. It builds fine with npm run dev.
import Echo from 'laravel-echo'
// Read the CSRF token from the page's <meta name="token"> tag so Ajax
// requests pass Laravel's CSRF middleware.
let token = document.head.querySelector('meta[name="token"]');
if (token) {
window.axios.defaults.headers.common['X-CSRF-TOKEN'] = token.content;
} else {
console.error('CSRF token not found: https://laravel.com/docs/csrf#csrf-x-csrf-token');
}
// Echo client pointed at the local laravel-echo-server instance.
// NOTE(review): the token is set on axios only — Echo itself is not given
// a csrfToken here, so the echo server's auth request can fail with a 419/500
// (this is exactly what the accepted answer below fixes).
window.Echo = new Echo({
broadcaster: 'socket.io',
host: '127.0.0.1:3000',
reconnectionAttempts: 5
});
// Join the presence channel and log each `.user.checked_in` event.
window.Echo.join('checked-in-1')
.listen('.user.checked_in', (e) => {
console.log(e);
});
When trying to listen for any event on start laravel-echo-server command. It keeps throwing Client can not be authenticated, got HTTP status 500.
Note :
I really didn't find anything helpful on laravel-echo-server nor on Google.
Any help will be appreciated a lot.
Laravel V5.4
Thanks
The issue was just because of the CSRF token. I didn't pass the token to Echo.
// Fixed configuration: pass the CSRF token to Echo itself so the
// private/presence channel auth request can be authenticated.
window.Echo = new Echo({
broadcaster: 'socket.io',
host: '127.0.0.1:3000',
reconnectionAttempts: 5,
csrfToken: token.content <--
});
I'm looking into axios to use for some node.js http calls to APIs while inside a corp firewall - and i'm falling down at the first hurdle.
I found an example that uses axios to do a server http call below
const axios = require('axios');
// Mock API used as the upstream for the /posts route.
const API = 'https://jsonplaceholder.typicode.com';

/* GET api listing. */
router.get('/', (req, res) => {
  res.send('api works');
});

// Get all posts from the mock API, tunnelling through the corporate proxy.
router.get('/posts', (req, res) => {
  // axios expects `proxy.host` to be a bare hostname; including the
  // protocol ('http://proxy.com') makes the DNS lookup fail with the
  // ENOTFOUND error shown below.
  axios.get(`${API}/posts`, { proxy: { host: 'proxy.com', port: 8080 } })
    .then((posts) => {
      res.status(200).json(posts.data);
    })
    .catch((error) => {
      // Surface upstream/proxy failures to the client as a 500.
      res.status(500).send(error);
    });
});

module.exports = router;
but when i'm behind the firewall i get an error below
// http://localhost:3000/api/posts
{
"code": "ENOTFOUND",
"errno": "ENOTFOUND",
"syscall": "getaddrinfo",
"hostname": "http://proxy.com",
"host": "http://proxy.com",
"port": 8080,
"config": {
"transformRequest": {
},
"transformResponse": {
},
"timeout": 0,
"xsrfCookieName": "XSRF-TOKEN",
"xsrfHeaderName": "X-XSRF-TOKEN",
"maxContentLength": -1,
"headers": {
"Accept": "application/json, text/plain, */*",
"User-Agent": "axios/0.15.3",
"host": "jsonplaceholder.typicode.com"
},
"method": "get",
"proxy": {
"host": "http://proxy.com",
"port": 8080
},
"url": "https://jsonplaceholder.typicode.com/posts"
It works fine when I switch to a direct connection to the internet, and the proxy settings are what I use for npm. I'm not sure if the final solution will be inside or outside of the firewall, but I can't figure out how to do this either specifically for this API or even globally just for dev. Any help would be appreciated.
I believe that axios just wants the host in the proxy config, not the URL with protocol:
"proxy": {
"host": "proxy.com",
"port": 8080
}
I am trying to configure a newly registered user's timed-out confirmation URL.
When user clicks on timed out link from mail, screen looks like :
{"error":{"name":"Error","status":404,"message":"User not found: 19","statusCode":404,"code":"USER_NOT_FOUND","stack":"Error: User not found: 19\n at c:\\NodeJS\\UyguncaAdmin\\node_modules\\loopback\\common\\models\\user.js:477:19\n at c:\\NodeJS\\UyguncaAdmin\\node_modules\\loopback-datasource-juggler\\lib\\dao.js:1524:62\n at c:\\NodeJS\\UyguncaAdmin\\node_modules\\loopback-datasource-juggler\\lib\\dao.js:1456:9\n at Object.async.each (c:\\NodeJS\\UyguncaAdmin\\node_modules\\loopback-datasource-juggler\\node_modules\\async\\lib\\async.js:153:20)\n at allCb (c:\\NodeJS\\UyguncaAdmin\\node_modules\\loopback-datasource-juggler\\lib\\dao.js:1394:13)\n at c:\\NodeJS\\UyguncaAdmin\\node_modules\\loopback-connector-mysql\\node_modules\\loopback-connector\\lib\\sql.js:1071:7\n at c:\\NodeJS\\UyguncaAdmin\\node_modules\\loopback-datasource-juggler\\lib\\observer.js:166:22\n at doNotify (c:\\NodeJS\\UyguncaAdmin\\node_modules\\loopback-datasource-juggler\\lib\\observer.js:93:49)\n at MySQL.ObserverMixin._notifyBaseObservers (c:\\NodeJS\\UyguncaAdmin\\node_modules\\loopback-datasource-juggler\\lib\\observer.js:116:5)\n at MySQL.ObserverMixin.notifyObserversOf (c:\\NodeJS\\UyguncaAdmin\\node_modules\\loopback-datasource-juggler\\lib\\observer.js:91:8)"}}
I want to send a more user-friendly page that includes a message like "Confirmation mail timed out ...".
I tried to use the "afterRemote" method, but it does not work. I can't get the "in confirm afterRemote" message.
// Hook that should run after the remote 'confirm' method completes.
// NOTE(review): afterRemote hooks only fire when the remote method
// succeeds — a failed confirm (expired token, USER_NOT_FOUND) goes to the
// error path instead, which would explain why this log never appears.
MyUser.afterRemote('confirm', function(ctx, inst, next) {
console.log('in confirm afterRemote...');
next(); });
Is there any way to do this? What is wrong with this afterRemote method?
Edit your server/config.json
and set "disableStackTrace": true and "disableStatusCode": true
under errorHandler section look below example
Don't forget to vote ;-)
Cheers
{
"restApiRoot": "/api",
"host": "0.0.0.0",
"port": 4000,
"remoting": {
"context": {
"enableHttpContext": false
},
"rest": {
"normalizeHttpPath": false,
"xml": false
},
"json": {
"strict": false,
"limit": "100kb"
},
"urlencoded": {
"extended": true,
"limit": "100kb"
},
"cors": {
"origin": true,
"credentials": true
},
"errorHandler": {
"disableStackTrace": true,
"disableStatusCode": true
}
}
}