When I go to my site, it gets data from the database. The first load is slow and takes 1–2 seconds, but after that it is fast — around 10 ms. Why is the first connection slow? This only happens when I use the Cassandra driver.
const http = require('http');
require('dotenv').config();
const { Client } = require('cassandra-driver');
const express = require('express');
const app = express();
const PORT = process.env.PORT;
const routes = require('./src/routes/index');
// Cassandra client for DataStax Astra (secure connect bundle).
// NOTE(review): constructing the Client does NOT open any connections;
// the driver dials the cluster lazily on the first connect()/execute().
const client = new Client({
cloud: {
secureConnectBundle: "secure-connect-weinf.zip",
},
keyspace: 'wf_db',
credentials: {
username: "username",
password: "password",
},
});
const cors = require('cors');
app.use(cors());
app.use(express.json());
// Root route: queries Cassandra, logs the result set, replies with a fixed message.
app.get('/', async (req, res) => {
// This connect() is why the first request is slow: the initial call pays
// the TCP/TLS handshake and cluster-metadata fetch (~1-2 s); subsequent
// calls are effectively no-ops because the driver is already connected.
await client.connect();
const rs = await client.execute("SELECT * FROM employ_by_id;");
console.log(rs);
return res.json({
message: 'Hello'
})
});
const server = http.createServer(app);
server.listen(PORT, () => {
console.log(`Server listen on port ${PORT}`)
});
Your code is creating the TCP connections on the first request:
// The connect() below runs inside the request handler, so the first
// request pays the full connection-setup cost before the query can run.
app.get('/', async (req, res) => {
await client.connect(); // <- this should be avoided in the serving path
const rs = await client.execute("SELECT * FROM employ_by_id;");
console.log(rs);
return res.json({
message: 'Hello'
})
});
To mitigate this, you can create the connections beforehand:
// Route handler: by the time requests arrive, the client is already
// connected (see client.connect() below), so each request only pays the
// fast query cost.
app.get('/', async (req, res) => {
  try {
    const rs = await client.execute("SELECT * FROM employ_by_id;");
    console.log(rs);
    return res.json({
      message: 'Hello'
    });
  } catch (err) {
    // Without this, a failed query becomes an unhandled rejection and the
    // request hangs with no response.
    console.error(err);
    return res.status(500).json({ message: 'Database error' });
  }
});
const server = http.createServer(app);
// Warm up the driver's connection pool BEFORE accepting traffic so the
// first HTTP request does not pay the TCP/TLS handshake latency.
client.connect(err => {
  if (err) {
    // A server that cannot reach its database should fail fast and loudly.
    console.error('Failed to connect to Cassandra:', err);
    process.exit(1);
  }
  // Now that I'm connected to my DB, I can start serving requests
  server.listen(PORT, () => {
    console.log(`Server listen on port ${PORT}`)
  });
});
Reads from disk are always slower than reads from memory. When you query for the first time, Cassandra database reads from the disk which gets you a slow result. Second time Cassandra replies from the cached rows if caching is enabled, hence you get your results faster.
PS: Please don't execute "select * from table" queries — they are an anti-pattern in Cassandra.
Related
Hi, I am trying to fetch data from the server/index.js API endpoint via data.getData(), which is provided by api/data.js, which in turn gets its configuration and connection object from db/index.js. The problem is that the async/await is not returning anything, and in the dev-console network tab the request shows as pending. I don't receive any errors either. The code is below. Using node-pg with pool.query.
//server/index.js
const express = require("express");
const data = require("./api/data")
const PORT = process.env.PORT || 3001;
const app = express();
app.use(express.json())
app.get('/', async (req, res) => {
// res.status(200).send('Hello World!');
// NOTE(review): mixing `await` with .then()/.catch() on the same call is
// confusing — pick one style.
// BUG: console.log(response) returns undefined, so send() transmits an
// empty body; the client sees no data even when the query succeeds.
await data.getData()
.then(response => {
res.status(200).send(console.log(response));
})
.catch(error => {
res.status(500).send(error);
})
console.log("server running")
})
app.listen(PORT, () => {
console.log(`Server listening on ${PORT}`);
});
//api/data.js
const db = require('../db/index.js');
// Runs a test query against the pool and returns the stringified result.
const getData = async () => {
const query = {
// text: 'SELECT COUNT(*) as count FROM "public"."Subregions-USA";',
text: 'SELECT Now()',
// values: ['B007RKDGVQ'],
// rowMode: 'array',
};
// NOTE(review): if the query fails, the .catch only logs — getData then
// resolves with undefined instead of propagating the error to the caller.
const dbResults = await db.query(query)
.then((data) => JSON.stringify(data))
.catch(error => {
console.log(error)
})
console.log("database response test")
return dbResults
}
// getData()
module.exports = {getData}
The setup is pretty simple, but I can't understand why it is not working.
I am trying to connect to postgresql in digital ocean and all the connections and configs are correct. I have used the same setup similar but with electron.js and it works. I am able to retrieve the data easily.
Any help appreciated.
You should avoid using both async/await and then/catch at the same time. Use one of them.
server/index.js
const express = require("express");
const data = require("./api/data")
const PORT = process.env.PORT || 3001;
const app = express();
app.use(express.json())
// Root route: fetch from the database and return the serialized result.
// Uses async/await with try/catch only — never mix it with .then()/.catch().
app.get('/', async (req, res) => {
  // res.status(200).send('Hello World!');
  try {
    const result = await data.getData();
    res.status(200).send(result);
  } catch (error) {
    // res.send(error) on an Error object serializes to "{}"; send the
    // message so the client actually sees what went wrong.
    res.status(500).send(error.message);
  }
});
app.listen(PORT, () => {
  console.log(`Server listening on ${PORT}`);
});
api/data.js
const db = require('../db/index.js');

/**
 * Runs a simple test query against the database and returns the result
 * serialized as a JSON string.
 */
async function getData() {
  // Query object — the commented fields show how to add parameters later.
  const query = {
    // text: 'SELECT COUNT(*) as count FROM "public"."Subregions-USA";',
    text: 'SELECT Now()',
    // values: ['B007RKDGVQ'],
    // rowMode: 'array',
  };
  const rows = await db.query(query);
  console.log("database response test", rows);
  return JSON.stringify(rows);
}

module.exports = { getData };
Ok I have found the problem and it has nothing to do with the logic itself but more with the package I installed for PostgreSQL.
Instead of installing npm i pg I did instead npm i node-pg for some stupid reason.
After uninstalling and installing the correct package I am able to work with the PostgreSQL response.
I am writing an app. When I start the server, it works nicely and connects to the database. But when I open http://localhost:5000 in the browser, it does not respond for a minute, and then the browser shows a message:
localhost didn’t send any data.
ERR_EMPTY_RESPONSE
Here is my app.js:
const express = require('express');
const app = express();
const cookieParser = require('cookie-parser');
const mongoose = require('mongoose');
// BUG: this passes the cookie-parser module itself instead of calling it.
// Express sees a 3-arg function and waits on middleware that never calls
// next(), so every request hangs — it should be app.use(cookieParser()).
app.use(cookieParser);
app.use(express.json());
//const userRouter = require('./routes/user');
//app.use('/user', userRouter);
// Connects to MongoDB first, then starts the HTTP listener.
const startApp = async () => {
try {
// NOTE(review): the '#' in 'pass#cluster0' looks like a mangled '@'
// from the original connection string — verify the URI.
await mongoose.connect('mongodb+srv://username:pass#cluster0-dcytp.mongodb.net/test?retryWrites=true&w=majority',
{ useUnifiedTopology: true,useNewUrlParser: true });
console.log(`successfully connected to database`);
const port = process.env.PORT || 5000
app.listen(port, () => {
console.log(`server runnin at ${port}`);
});
} catch (error) {
console.log(error.message)
}
}
startApp();
app.get('/', (req, res) => {
console.log("I am in the root");
res.send("hello World");
})
Why server is not responding from the browser?
try
app.use(cookieParser())
instead of
app.use(cookieParser)
Reference
I had the same problem db is connected but not able to send data. it looks weird but it works for me.
Add a new database user with a new password, then use the new username and password to connect to your MongoDB.
Here is the exact link for adding a new database user.
here is my db connection may it helps you too
// Open the connection and wire up basic success/error logging.
mongoose.connect(DATABASE_URL, { useUnifiedTopology: true, useNewUrlParser: true });
const connection = mongoose.connection;
connection.on('error', (err) => console.error(err));
connection.once('open', () => console.log('connected to database'));
app.use(express.json());
So my issue is that the program obviously isn't working, but it also isn't crashing. It's like it never executes the try or catch.
const express = require("express"),
app = express(),
sql = require("mssql"),
// BUG(review): `5000 || process.env.PORT` always evaluates to 5000; the
// intended order is `process.env.PORT || 5000`.
port = 5000 || process.env.PORT,
bodyParser = require("body-parser"),
routerBoi = express.Router();
const sqlConfig = {
user: "sa",
password: "#Strongman105",
server: "DESKTOP-RVS5F2QHSTESTSERVER",
database: "master"
};
// BUG: this is an async function *expression* that is never assigned or
// invoked — its body (including the try/catch) never runs, which is why
// nothing executes and nothing crashes. Wrap it in parens and call it.
async () => {
try {
// make sure that any items are correctly URL encoded in the connection string
await sql.connect(sqlConfig);
const result = await sql.query(`select * from Users`);
console.log(result);
} catch (err) {
console.log(err);
}
};
// console.log that your server is up and running
app.listen(port, () => console.log(`Listening on port ${port}`));
// NOTE(review): body parsers registered after listen() still apply, but
// they are conventionally registered before the routes that need them.
app.use(bodyParser.urlencoded({ extended: true }));
app.use(bodyParser.json());
The entirety of my output is "Listening on port 5000".
What am I doing wrong here?
// Immediately-invoked async wrapper: without the trailing call, the
// function body (and its try/catch) would never run.
(async () => {
  try {
    await sql.connect(sqlConfig);
    const result = await sql.query(`select * from Users`);
    console.log(result);
  } catch (err) {
    console.log(err);
  }
})();
Just wrap your async function in parentheses and make it a self-executing function (IIFE). You can refer to this in the official docs: https://www.npmjs.com/package/mssql#asyncawait
I am trying to learn node.js and I am stuck trying to send the JSON to the browser, I have not tried to consume the REST service because apparently the JSON is not being sent, it does not appear in postman.
This is my code:
Database connections pool
// Shared MySQL connection pool for the app (exported below).
const mysql = require('mysql');
const pool = mysql.createPool({
host: "localhost",
database: "dbname",
// NOTE(review): root with an empty password is acceptable only for local
// development; `debug: true` makes the driver very verbose.
user: "root",
password: "",
debug: true
});
module.exports.pool = pool;
Index
const Joi = require('joi');
const connections = require('./connections')
const express = require('express');
const app = express();
app.use(express.json());
// Resolves with the `clientes` rows serialized as a JSON string.
function getClients() {
return new Promise((resolve, reject) => {
connections.pool.getConnection((error, connection) => {
// NOTE(review): the getConnection `error` is never checked — if the pool
// fails, `connection` is undefined and .query() throws synchronously.
connection.query("select * from clientes", (error, rows) => {
if(error) {
reject(new Error);
}
else {
resolve(JSON.stringify(rows));
}
})
connection.release();
})
});
}
app.get('/', (req, res) => {
// BUG: the rows are only logged, never sent — res.end() below runs
// immediately (before the promise resolves) and closes the response with
// an empty body, which is why Postman shows a blank 200.
getClients().then((rows) => {console.log(rows)})
.catch(err => err);
console.log(res);
res.end();
});
const PORT = process.env.PORT || 3000;
app.listen(PORT, () => console.log(`listening on port ${PORT}`));
In this part:
getClients().then((rows) => {console.log(rows)})
It logs the correct JSON response at the end; however, it catches my attention that I am getting a very long server-object dump before the JSON string.
when I use res.send(rows) it displays nothing, postman gives me 200 response and blank page.
I literally started learning NODEJS 3 days ago and I have been stuck with this for one day :(
You are not sending any data in the response.
res.send sends only strings (or buffers), not serialized objects — use res.json() instead:
// Root route: respond with the client rows as JSON, or a 400 carrying
// the error when the lookup fails.
app.get('/', async (req, res) => {
  try {
    const rows = await getClients();
    res.json(rows);
  } catch (err) {
    res.status(400).json(err);
  }
});
I'm passing the value of a variable in the URL from Python as:
response = urlopen("localhost:5000/warehouse?fruitid=103456",timeout=10);
data = json.loads(response.read().decode('utf8'));
And it reading the response in json format for further processing.
How can I write the Node.js route that handles this request, reads the passed value of fruitid=103456, and inserts a timestamp into the database when the request occurs?
Please help me out...
Try this — I use it to parse out JSON responses from other sources:
# Flattens selected fields of a decoded JSON response into a DataFrame.
import pandas as pd
# NOTE(review): the trailing `...` is a placeholder for more fields, and
# `data` must already hold the parsed JSON dict — this snippet is
# pseudo-code as written and will not run verbatim.
data1=dict(field1=data['field1_in_response'], field2=data['field2_in_response'],...);
data1=pd.DataFrame(data1)
print(data1)
const express = require('express');
const app = express();
const port = 5000;
var mysql = require('mysql')
var squel = require("squel");
// Single shared MySQL connection for the app.
var connection = mysql.createConnection({
  host: 'localhost',
  user: 'root',
  password: 'toor',
  database: 'invoice'
});
// GET /warehouse?fruitid=103456 — look up a fruit by id and return the
// matching names.
app.get('/warehouse', (req, res) => {
  let fruitid = req.query.fruitid;
  // Use a parameterized query: concatenating req.query values into the
  // SQL string is a SQL-injection vulnerability.
  let queryDashboard = "select * from fruit where fruitid = ?";
  connection.query(queryDashboard, [fruitid], function (err, rows, fields) {
    if (err) {
      // Report the failure to the client instead of crashing the whole
      // process with `throw` inside an async callback.
      return res.status(500).send(err.message);
    }
    console.log('query get successful');
    var result = rows.map(data => data.name);
    res.send(result);
    // connection.end()
  })
});
app.listen(port, () => console.log(`Example app listening on port ${port}!`));
Here's how you could handle this kind of request in an Express/MongoDB app:
const express = require('express');
const MongoClient = require('mongodb').MongoClient;
const app = express();
const port = 5000;
// GET /warehouse?fruitid=... — look up matching fruit documents.
app.get('/warehouse', (req, res) => {
  console.log('fruitid:', req.query.fruitid);
  // NOTE(review): connecting per request is simple but slow; reuse a
  // single client in production.
  MongoClient.connect('mongodb://localhost:27017', function (err, client) {
    if (err) {
      // Respond with the failure instead of crashing the process with
      // `throw` inside an async callback.
      return res.status(500).send(err.message);
    }
    const db = client.db('mydatabase');
    db.collection('fruits').find({ id: req.query.fruitid }).toArray(function (err, result) {
      // Release the connection whether or not the query succeeded.
      client.close();
      if (err) {
        return res.status(500).send(err.message);
      }
      res.json(result);
    });
  });
});
app.listen(port, () => console.log(`Example app listening on port ${port}!`));