I'm seeing some strange Redis behavior:
const redis = require('redis');
const { REDIS_URL: redisUrl, REDIS_PASSWORD: redisPassword } = process.env;
const client = redis.createClient(redisUrl, {
no_ready_check: true,
auth_pass: redisPassword
});
client.on('connect', () => {
redisPassword && client.auth(redisPassword);
});
client.on('error', err => {
global.console.log(err.message)
});
But I keep receiving the following error:
throw er; // Unhandled 'error' event
ReplyError: Ready check failed: NOAUTH Authentication required.
Why unhandled? I set an 'error' handler.
Why did the ready check fail? I disabled it in the options.
I'm not sure why your code throws this error, but I tried this code on my local machine and it works well:
const redis = require('redis');
const redisPassword = "password" ;
const client = redis.createClient({
host : '127.0.0.1',
no_ready_check: true,
auth_pass: redisPassword,
});
client.on('connect', () => {
global.console.log("connected");
});
client.on('error', err => {
global.console.log(err.message)
});
client.set("foo", 'bar');
client.get("foo", function (err, reply) {
global.console.log(reply.toString())
})
And running node client.js will output:
connected
bar
NOAUTH Authentication required means that when Redis processed a command, it found the client was not authenticated, so it complained.
I guess the redisUrl you give to createClient may have some problem; try to debug it, or switch to the approach in my code. Hopefully you can fix it.
And one more thing: the client.auth(redisPassword) call is not necessary, because if you set an auth_pass or password option, the Redis client will automatically send the AUTH command to the server before any other command.
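For example, a minimal sketch in the same node_redis style (reading the URL and password from the environment as in the question): the option alone is enough, and no 'connect' handler calling client.auth() is needed.
const redis = require('redis');

// With password / auth_pass set, node_redis sends AUTH itself on every (re)connect.
const client = redis.createClient(process.env.REDIS_URL, {
  no_ready_check: true,
  password: process.env.REDIS_PASSWORD
});

client.on('error', err => global.console.log(err.message));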
If you have the Redis URI saved as a string, you need to decompose it into an object. For ioredis you can use this function:
export function decomposeRedisUrl(url) {
  // Matches redis://[password@]host:port and pulls out the three parts.
  const [[, , password, host, port]] = [...(url.matchAll(/redis:\/\/(([^@]*)@)?(.*?):(\d*)/g))];
  return { password, host, port };
}
There are tests for this function:
it("redis url should be decomposed correctly with password", () => {
expect(decomposeRedisUrl("redis://pass#host.com:9183")).to.eql({
password: "pass",
host: "host.com",
port: "9183",
});
});
it("redis url should be decomposed correctly without password", () => {
expect(decomposeRedisUrl("redis://localhost:6379")).to.eql({
password: undefined,
host: "localhost",
port: "6379",
});
});
And the usage:
import Redis from "ioredis";
async function getKeysFromRedisUrl(url) {
const rc = new Redis(decomposeRedisUrl(url));
const keys = await rc.keys("*");
rc.disconnect();
return keys;
}
describe("Redis can connect", () => {
it("with cloud", async () => {
expect(await getKeysFromRedisUrl("redis://pass#host.com:9183")).to.be.an("array");
});
it("with local redis instance", async () => {
expect(await getKeysFromRedisUrl("redis://localhost:6379")).to.be.an("array");
});
});
Note: the user name is not handled in this function.
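If you need the user name as well, a hypothetical extension of the same regex (not part of the original answer; it assumes a redis://user:password@host:port shape) could capture it too:
export function decomposeRedisUrlWithUser(url) {
  // Adds an optional "user:" segment before the password; a URL without one
  // yields username === undefined, matching the behavior of the original function.
  const [[, , username, password, host, port]] =
    [...url.matchAll(/redis:\/\/((?:([^:@]*):)?([^@]*)@)?(.*?):(\d*)/g)];
  return { username, password, host, port };
}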
If you're using Docker to run Redis, check whether your docker-compose has the command: redis-server --requirepass redis
Then check your .env file to make sure you're using that password.
That was the problem here, and I was able to fix it by adding the password to the .env file.
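For example, assuming a docker-compose service started with redis-server --requirepass redis and dotenv to load the .env file (the values below are illustrative), the only requirement is that the client and --requirepass agree on the password:
// .env (illustrative):  REDIS_PASSWORD=redis
require('dotenv').config();             // loads .env into process.env
const redis = require('redis');

const client = redis.createClient({
  host: '127.0.0.1',
  password: process.env.REDIS_PASSWORD // must match the --requirepass value
});
client.on('error', err => global.console.log(err.message));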
Can I connect with ssh2 without a privateKey?
I am trying to connect to a server with SFTP, but I get the following error:
Timed out while waiting for handshake
I'm looking for an example, and almost everyone uses a privateKey. Is it mandatory, and how is one generated?
My code is the following...
var Client = require('ssh2').Client;
var conn = new Client();
conn.on('error', function (err) {
  console.log('SSH - Connection Error: ' + err);
});
conn.on('end', function () {
  console.log('SSH - Connection Closed');
});
conn.on('ready', function () {
  console.log('------ enter ------');
  // code to work with SSH
});
conn.connect({
  host: 'host',
  username: 'user',
  port: 22,
  password: 'password'
});
I am trying to connect to AWS ElastiCache for Redis, but I keep getting this error all the time. I am not sure what I am doing wrong here; any help is greatly appreciated. Here's my code:
async function testRedis(){
try{
const asyncRedis = require("async-redis");
const client = asyncRedis.createClient({
port: 6379,
host: 'myHost',
auth_pass: redisPassword,
connect_timeout: 900,
enable_offline_queue: false
})
const response = await client.set("test", "response");
const redisResp = await client.get("test");
const connStatus = client.quit()
console.log('connection status::', connStatus)
} catch (err) {
console.log(err)
}
}
I had the same issue. My host didn't provide an IP address for the host option, so I fixed it by using a connection string: [redis[s]:]//[[user][:password@]][host][:port][/db-number][?db=db-number[&password=bar[&option=value]]]
Should look something like:
const connectionString = 'rediss://username:password@url-to-your.redis.db.server.com:2500'
const client = asyncRedis.createClient(connectionString);
Async redis is a wrapper over Node Redis: https://www.npmjs.com/package/redis
Check out the docs for more info.
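A minimal usage sketch, assuming async-redis passes the connection string straight through to Node Redis and promisifies the commands (the URL below is illustrative; rediss:// is the TLS variant of the scheme):
const asyncRedis = require('async-redis');

const client = asyncRedis.createClient('rediss://username:password@url-to-your.redis.db.server.com:2500');

async function smokeTest() {
  await client.set('test', 'response');
  console.log(await client.get('test'));  // -> 'response'
  await client.quit();
}

smokeTest().catch(console.error);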
I'm trying to make a connection from my Node.js script to my database, but it seems like there is a suspicious issue I'm not able to figure out.
At the moment, this is my code:
const { Pool } = require('pg');
const pool = new Pool({
user: 'user',
host: '192.168.1.xxx',
database: 'database',
password: 'password',
port: 5432,
});
pool.on('error', (err, client) => {
console.error('Error:', err);
});
const query = `SELECT * FROM users`;
pool.connect()
.then((client) => {
client.query(query)
.then(res => {
for (let row of res.rows) {
console.log(row);
}
})
.catch(err => {
console.error(err);
});
})
.catch(err => {
console.error(err);
});
The issue seems to be in pool.connect(), but I can't understand what I'm missing because I get no errors in the log. I've installed the pg module in the directory of my project with npm install --prefix pg, and I know the modules are loaded correctly.
I edited postgresql.conf:
# - Connection Settings -
listen_addresses = '*'
and pg_hba.conf
host database user 192.168.1.0/24 md5
to make the database reachable via LAN, and it seems like it works because I'm able to connect successfully with apps like DBeaver... but I can't with Node.js.
Is there some kind of configuration I have to activate?
I have downloaded a learning project, so it is not my code, but when I try to start it I get this error. I tried to debug it a little by myself, but as I'm new to Node.js I'm having trouble here...
Error: Cannot set uncompiled validation rules without configuring a validator
at Object.module.exports.register (/home/antonp/Desktop/pizza-luvrs/routes/index.js:25:10)
at startServer (/home/antonp/Desktop/pizza-luvrs/index.js:12:10)
Here is the link to the full project: GitHub repo
index.js
const Hapi = require('@hapi/hapi')
const plugins = require('./plugins')
const routes = require('./routes')
async function startServer () {
const server = Hapi.Server({
port: process.env.PORT || 3000
})
await plugins.register(server)
routes.register(server)
try {
await server.start()
console.log(`Server running at: ${server.info.uri}`)
} catch (err) {
console.error(`Server could not start. Error: ${err}`)
}
}
process.on('unhandledRejection', err => {
console.log(err)
process.exit()
})
startServer()
Your issue is due to a change in hapi. Try changing the following code in pizza-luvrs-master/routes/login.post.js
validate: {
payload: {
username: Joi.string().alphanum().min(3).max(30).required(),
password: Joi.string().min(3).max(30).required()
}
}
to
validate: {
payload: Joi.object({
username: Joi.string().alphanum().min(3).max(30).required(),
password: Joi.string().min(3).max(30).required()
})
}
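Alternatively, since the error message literally says "without configuring a validator", you can register Joi once as the server's default validator instead of wrapping every rule set. This is a sketch against the project's index.js (it assumes hapi v19+, which is where this error appears, and whichever Joi package the routes already require):
const Hapi = require('@hapi/hapi')
const Joi = require('@hapi/joi') // or require('joi'), whichever the project already uses
const plugins = require('./plugins')
const routes = require('./routes')

async function startServer () {
  const server = Hapi.Server({
    port: process.env.PORT || 3000
  })
  server.validator(Joi)          // registers Joi so plain-object validation rules compile again
  await plugins.register(server)
  routes.register(server)
  await server.start()
}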
I have recently deployed my Node.js API application on a live server, and I am getting these issues there.
I have googled it but could not find an exact solution. Can anyone suggest how I can solve this problem?
{ Error: read ETIMEDOUT at TCP.onread (net.js:622:25) errno: 'ETIMEDOUT', code: 'ETIMEDOUT', syscall: 'read', fatal: true }
{ Error: Can't add new command when connection is in closed state at PoolConnection._addCommandClosedState }
I am using the mysql connection pool like this:
var mysql = require('mysql2');
var mysqlPool = mysql.createPool({
host: 'localhost',
user: 'root',
password: 'xyz',
database: 'xyz',
waitForConnections: true,
connectionLimit: 10,
queueLimit: 0
});
module.exports = mysqlPool;
I had a similar problem and ended up having to put the connection request in its own .js file and import it into the controller.
connectionRequest.js
module.exports = function () {
let mysql = require('mysql2')
let connCreds = require('./connectionsConfig.json');
//Establish Connection to the DB
let connection = mysql.createConnection({
host: connCreds["host"],
user: connCreds['username'],
password: connCreds['password'],
database: connCreds['database'],
port: 3306
});
//Instantiate the connection
connection.connect(function (err) {
if (err) {
console.log(`connectionRequest Failed ${err.stack}`)
} else {
console.log(`DB connectionRequest Successful ${connection.threadId}`)
}
});
//return connection object
return connection
}
Once I did that, I was able to import it into the controller file and use it in my query like so:
ControllerFile.js
let connectionRequest = require('../config/connectionRequest')
controllerMethod: (req, res, next) => {
//Establish the connection on this request
connection = connectionRequest()
//Run the query
connection.query("SELECT * FROM table", function (err, result, fields) {
if (err) {
// If an error occurred, send a generic server failure
console.log(`not successful! ${err}`)
connection.destroy();
} else {
//If successful, inform as such
console.log(`Query was successful, ${result}`)
//send json file to end user if using an API
res.json(result)
//destroy the connection thread
connection.destroy();
}
});
},
After a lot of messing around, I was able to solve the problem by destroying the connection, waiting (this is the important part), and getting the connection again.
conn = await connPool.getConnection();
// We have error: Can't add new command when connection is in closed state
// I'm attempting to solve it by grabbing a new connection
if (!conn || !conn.connection || conn.connection._closing) {
winston.info('Connection is in a closed state, getting a new connection');
await conn.destroy(); // Toast that guy right now
sleep.sleep(1); // Wait for the connection to be destroyed and try to get a new one, you must wait! otherwise u get the same connection
conn = await connPool.getConnection(); // get a new one
}
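Wrapped up as a reusable helper, the same idea might look like this (a sketch assuming a mysql2/promise pool and the sleep package, as implied by the snippet above; the names are illustrative):
const mysql = require('mysql2/promise');
const sleep = require('sleep');

const connPool = mysql.createPool({ host: 'localhost', user: 'root', password: 'xyz', database: 'xyz' });

// Hypothetical helper: hands back a usable connection, replacing it
// if the pool returned one that is already closed.
async function getHealthyConnection() {
  let conn = await connPool.getConnection();
  if (!conn.connection || conn.connection._closing) {
    conn.destroy();   // toast the dead connection right now
    sleep.sleep(1);   // wait, or the pool may hand back the same connection
    conn = await connPool.getConnection();
  }
  return conn;
}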