Run Node.js on localhost - node.js

I created a Node.js app that connects to SQL Server and accesses its database. I'm trying to reach it in the browser at "http://localhost:1433/test", but it never gives me the response I expect.
This is my Node.js code:
const uid = require('uuid');
const express = require('express');
const bodyParser = require('body-parser');
const sql = require('mssql');
const driver = require('msnodesqlv8');

const config = {
    // multipleStatements: true, TO EXECUTE MORE THAN ONE QUERY IN ONE STATEMENT
    //driver: 'msnodesqlv8',
    server: 'MOSTAFA',
    user: 'admin',
    password: 'mostafabaron123#',
    database: 'WholeSale',
    options: {
        trustedConnection: true,
        useUTC: true,
    }
};

const userExistCode = 200;
const buyerCreatedCode = 400;
const buyerNotCreatedCode = 401;
const sellerCreatedCode = 500;
const sellerNotCreatedCode = 501;
const connectionErrCode = 100;

var app = express();
app.use(bodyParser.json()); // accept JSON params
app.use(bodyParser.urlencoded({ extended: true }));

// buyer signup part
app.post('/buyerSignUp', function(req, res) {
    var postData = req.body; // get post params
    var userId = uid.v4();
    sql.connect(config, (err) => {
        if (err) {
            console.log('not connected');
            return;
        }
        var request = new sql.Request();
        request.input('email', sql.VarChar(45), postData.email)
            .input('password', sql.VarChar(45), postData.password)
            .input('category', sql.VarChar(45), postData.category)
            .query('select * from Buyer where email = @email and category = @category and password = @password',
                (err, recordSet) => {
                    if (recordSet.length == 1) {
                        res.status(userExistCode).send("Buyer already exists.");
                    } else {
                        new sql.Request()
                            .input('buyerId', sql.VarChar(45), userId)
                            .input('firstName', sql.VarChar(45), postData.firstName)
                            .input('lastName', sql.VarChar(45), postData.lastName)
                            .input('email', sql.VarChar(45), postData.email)
                            .input('password', sql.VarChar(45), postData.password)
                            .input('phoneNumber', sql.VarChar(45), postData.phoneNumber)
                            .input('category', sql.VarChar(45), postData.category)
                            .query('insert into Buyer(buyer_id, first_name, last_name, email, password, phone_number, category) values(@buyerId, @firstName, @lastName, @email, @password, @phoneNumber, @category)',
                                (err, recordSet) => {
                                    if (err) {
                                        console.log(err);
                                        return;
                                    }
                                    if (recordSet.length == 1) {
                                        res.status(buyerCreatedCode).send("Account Created as Buyer.");
                                    } else {
                                        res.status(buyerNotCreatedCode).send("Account not Created as Buyer.");
                                    }
                                });
                    }
                });
    });
});

app.listen(1433, () => {
    console.log('server is running on 1433...');
});
What I expect to see is a message saying whether the user was created or already exists.
Can anyone explain why this is happening?

Just from reading your code, here is what I suspect is happening: you are starting up Express and listening on port 1433. It does successfully call your listening callback and print the appropriate console.log statement, so we know it gets that far.
Then, when a request comes in, you attempt to connect to your database. Your database port is also 1433, which we know cannot be right (the database cannot be listening on the same port as your Express app; if it were, Express would never have started). So your connection to the database fails.
The database connection callback checks for an error, logs 'not connected', and then returns without ever sending a response. This means you'll never see a success or failure message, the queries never run, and the request simply hangs. Basically, your app is running, but none of its routes can do anything useful.
(I'm not sure what you even expect to see at the URL /test -- nothing in this code snippet creates that route, so it can only return a 404. But you'll need to fix the issues above in any case.)
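To illustrate the fix, here is a minimal sketch (not the poster's actual code): SQL Server stays on its default port 1433, Express moves to an arbitrary free port (3000 here), and a simple /test route is added, so the page would be reachable at http://localhost:3000/test instead. Note that mssql expects @name placeholders in parameterized queries.
const express = require('express');
const sql = require('mssql');

const config = {
    server: 'MOSTAFA',
    user: 'admin',
    password: 'mostafabaron123#',
    database: 'WholeSale',
    port: 1433 // SQL Server keeps its own (default) port
};

const app = express();
app.use(express.json());

app.get('/test', async (req, res) => {
    try {
        await sql.connect(config); // one global pool; fine for a small sketch
        const result = await new sql.Request()
            .input('email', sql.VarChar(45), req.query.email)
            .query('select * from Buyer where email = @email');
        res.json(result.recordset);
    } catch (err) {
        console.log(err);
        res.status(500).send('Database error');
    }
});

// The Express port must be different from SQL Server's port.
app.listen(3000, () => {
    console.log('server is running on 3000...');
});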

Related

Error: No responses defined for platform: null when listing a user's Google Calendar events

I'm trying to implement a reminders Dialogflow agent in Node.js that reminds the user about upcoming events on his Google Calendar. However, I'm getting a "No responses defined for platform: null" error when calling the intent that lists the upcoming events.
This is my code:
const express = require('express');
const google = require('googleapis').google;
const jwt = require('jsonwebtoken');
const dfff = require('dialogflow-fulfillment')
const {googlec} = require('googleapis');
// Google's OAuth2 client
const OAuth2 = google.auth.OAuth2;
// Including our config file
const CONFIG = require('./config');
// Creating our express application
const app = express();
// Allowing ourselves to use cookies
const cookieParser = require('cookie-parser');
app.use(cookieParser());
// Setting up EJS Views
app.set('view engine', 'ejs');
app.set('views', __dirname);
console.log(dfff)
app.get('/', function (req, res) {
// Create an OAuth2 client object from the credentials in our config file
const oauth2Client = new OAuth2(CONFIG.oauth2Credentials.client_id, CONFIG.oauth2Credentials.client_secret, CONFIG.oauth2Credentials.redirect_uris[0]);
// Obtain the google login link to which we'll send our users to give us access
const loginLink = oauth2Client.generateAuthUrl({
access_type: 'offline', // Indicates that we need to be able to access data continously without the user constantly giving us consent
scope: CONFIG.oauth2Credentials.scopes // Using the access scopes from our config file
});
return res.render("index", { loginLink: loginLink });
});
app.get('/auth_callback', function (req, res) {
// Create an OAuth2 client object from the credentials in our config file
const oauth2Client = new OAuth2(CONFIG.oauth2Credentials.client_id, CONFIG.oauth2Credentials.client_secret, CONFIG.oauth2Credentials.redirect_uris[0]);
if (req.query.error) {
// The user did not give us permission.
return res.redirect('/error');
} else {
oauth2Client.getToken(req.query.code, function(err, token) {
if (err)
return res.redirect('/');
// Store the credentials given by google into a jsonwebtoken in a cookie called 'jwt'
res.cookie('jwt', jwt.sign(token, CONFIG.JWTsecret));
return res.redirect('/');
});
}
});
app.post('/', express.json(),(req,res)=>{
//if (!req.cookies.jwt) {
// We haven't logged in
//return res.redirect('/');
//}
const oauth2Client = new OAuth2(CONFIG.oauth2Credentials.client_id, CONFIG.oauth2Credentials.client_secret, CONFIG.oauth2Credentials.redirect_uris[0]);
const calendar = google.calendar({version: 'v3' , auth:oauth2Client});
const agent = new dfff.WebhookClient({
request : req,
response : res
})
function welcome(agent){
agent.add("Hi")
}
function listEvents(agent){
calendar.events.list({
'calendarId': 'primary',
'auth':oauth2Client,
'timeMin': (new Date()).toISOString(),
'showDeleted': false,
'singleEvents': true,
'maxResults': 10,
'singleEvents': true,
'orderBy': 'startTime'
}).then((err,response)=> {
let events = response.result.items;
if (events.length > 0) {
for (let i = 0; i < events.length; i++) {
var event = events[i];
var when = event.start.dateTime;
if (!when) {
when = event.start.date;
}
return agent.add('Be ready for '+ event.summary + ' event '+ 'at ' + when )
}}
else {
return agent.add('You dont have any upcoming events.');
}
});
}
let intenMap = new Map()
intenMap.set('Default_Welcome_Intent',welcome)
intenMap.set('Remind_me',listEvents)
agent.handleRequest(intenMap)
});
// Listen on the port defined in the config file
app.listen(CONFIG.port, function () {
console.log(`Listening on port ${CONFIG.port}`);
});
Whenever the listEvents function is processed I'm getting "Error: No responses defined for platform: null". Any idea why?
The issue is that listEvents() does an asynchronous operation that returns a Promise (the call to calendar.events.list()), which you handle through the .then() block, but you don't have a way for the agent.handleRequest() function to know this and to wait for the Promise to complete. To do this, you need to return a Promise.
Fortunately, in your case, the solution is straightforward: since the call and its .then() chain already return a Promise, you can just return it. This is done by adding the return keyword before calendar.events.list(). It might look something like this:
function listEvents(agent) {
    return calendar.events.list({
        // Parameters go here
    }).then((response) => {
        // Note: the .then() callback receives only the resolved response;
        // handle errors in a separate .catch() block.
        // Code to handle the response and add a message goes here
    });
}
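Applied to the code in the question, the fixed handler might look something like the sketch below (untested); note that with the Node googleapis client the events come back on response.data.items, and the .then() callback receives only the response, not an error argument:
function listEvents(agent) {
    return calendar.events.list({
        calendarId: 'primary',
        auth: oauth2Client,
        timeMin: (new Date()).toISOString(),
        showDeleted: false,
        singleEvents: true,
        maxResults: 10,
        orderBy: 'startTime'
    }).then((response) => {
        const events = response.data.items || [];
        if (events.length === 0) {
            return agent.add("You don't have any upcoming events.");
        }
        events.forEach((event) => {
            const when = event.start.dateTime || event.start.date;
            agent.add('Be ready for ' + event.summary + ' event at ' + when);
        });
    });
}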

How to write a GET REST API with Node.js, Express, and MS SQL to insert data into a DB with multiple parameters?

I'm working on a REST API GET request to insert new data with multiple parameters into a SQL database, but it doesn't work. Here is my code:
var express = require('express'); // Web Framework
var app = express();
var sql = require('mssql'); // MS Sql Server client
const { request } = require('http');
// Connection string parameters.
var sqlConfig = {
user: 'username',
password: 'password',
server: 'serveraddress',
database: 'databasename'
}
// Start server and listen on http://localhost:8081/
var server = app.listen(8081, function () {
var host = server.address().address
var port = server.address().port
console.log("app listening at http://%s:%s", host, port)
});
app.get("/item/insert/:name/:category/:color/:description/:numberOfUsage/:size/:status", function(req , res){
console.log(req.params)
sql.connect(sqlConfig, function() {
var request = new sql.Request();
var stringRequest = "INSERT INTO dbo.Item (name, category, color, description, numberOfUsage, size, status) VALUES ("+ req.query.name +","+ req.query.category+","+ req.query.color+","+ req.query.description+","+req.query.numberOfUsage+","+req.query.size+","+req.query.status+" )";
request.query(stringRequest, function(err, recordset) {
if(err) console.log(err);
res.end(JSON.stringify(recordset)); // Result in JSON format
});
});
})
If I try to insert a new item via the browser, like "http://localhost:8081/item/insert/MyNewShirt/Shirt/Red/Thatsmynewshirt/1/L/1", it always returns status 200 - OK, but the IDE console prints "RequestError: Invalid column name 'undefined'."
What does this mean? How do I solve it?
That's because you're accessing each parameter value directly by its name. In Express you read query string variables via req.query, and you need to do that for every parameter used in this query:
INSERT INTO dbo.Item (name, category, color, description, numberOfUsage, size, status) VALUES ("+ req.query.name +","+ req.query.category+","+ req.query.color+","+ req.query.description+","+req.query.numberOfUsage+","+req.query.size+","+req.query.status+" )"
Change all the values as shown below:
var name = req.query.name;
Otherwise, in plain Node.js, you can access req.url and use the built-in url module's url.parse (https://nodejs.org/api/url.html#url_url_parse_urlstring_parsequerystring_slashesdenotehost) to parse it manually:
var url = require('url');
var url_parts = url.parse(request.url, true);
var query = url_parts.query;
I found a solution. First the new code:
app.get("/item/insert/:name/:category/:color/:description/:numberOfUsage/:size/:status", function(req , res){
sql.connect(sqlConfig, function() {
var request = new sql.Request();
var stringRequest = "INSERT INTO dbo.Item (name, category, color, description, numberOfUsage, size, status) VALUES ('"+ req.params.name +"','"+ req.params.category+"','"+ req.params.color+"','"+ req.params.description+"','"+req.params.numberOfUsage+"','"+req.params.size+"','"+req.params.status+"')";
console.log(stringRequest)
request.query(stringRequest, function(err, recordset) {
if(err) console.log(err);
res.end(JSON.stringify(recordset)); // Result in JSON format
});
});
})
The only two things I had to change were:
Parameters: 'req.query' changed to 'req.params'
SQL statement: using single quotes (') instead of double quotes (") to wrap the VALUES
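That works, but building the SQL string by concatenation leaves the endpoint open to SQL injection. A sketch of the same route using mssql's parameterized inputs (the column types are guesses, since the Item schema isn't shown) could look something like this:
app.get("/item/insert/:name/:category/:color/:description/:numberOfUsage/:size/:status", function (req, res) {
    sql.connect(sqlConfig, function (err) {
        if (err) { console.log(err); return res.status(500).send("Connection error"); }
        var request = new sql.Request();
        request.input('name', sql.VarChar, req.params.name)
            .input('category', sql.VarChar, req.params.category)
            .input('color', sql.VarChar, req.params.color)
            .input('description', sql.VarChar, req.params.description)
            .input('numberOfUsage', sql.Int, parseInt(req.params.numberOfUsage, 10))
            .input('size', sql.VarChar, req.params.size)
            .input('status', sql.VarChar, req.params.status)
            .query("INSERT INTO dbo.Item (name, category, color, description, numberOfUsage, size, status) VALUES (@name, @category, @color, @description, @numberOfUsage, @size, @status)",
                function (err, recordset) {
                    if (err) { console.log(err); return res.status(500).send("Query error"); }
                    res.end(JSON.stringify(recordset)); // Result in JSON format
                });
    });
});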

NodeJS MongoDB Times Out Loading Route

I am having trouble after moving all of my controllers, routes, and models into their own files. I now get a timeout when loading anything from the database, and none of my console.log()s run anymore. The model works, and it posts the testPost (in the controller) perfectly fine. I added the testPost because neither console.log() output nor errors were showing in my console.
Controller
//jshint esversion: 6
const mongoose = require('mongoose');
const Post = require(`./../models/posts`);
//Initial Post to posts Model
const testPost = new Post ({
postTitle: `Default Post`,
postDate: Date.now(),
postBody: `If you're seeing this, It means your DB is connected and you're ready to go!`,
postAuthor: `Admin`
});
const defaultPost = [testPost];
//View Single Post
exports.showOne = function(req, res){
const requestedPostTitle = req.params.id;
Post.findOne({postTitle: requestedPostTitle}, function(err, foundPost){
if (!err && foundPost){
const title = foundPost.postTitle;
const date = foundPost.postDate;
const content = foundPost.postBody;
const author = foundPost.postAuthor;
res.render(`./post/post`, {
title: title,
date: date,
content:content,
author:author
});
}
});
};
Model
//jshint esversion: 6
const mongoose = require(`mongoose`);
const postSchema = new mongoose.Schema({
    {SCHEMA DATA}
});
module.exports = mongoose.model(`Post`, postSchema);
exports.databaseName = `postsDB`;
index.js Routes
app.get(`/posts`, postPages.showAll);
app.get(`/post/:id`, postPages.showOne);
app.get(`/post/compose`, postPages.compose);
app.post(`/post/compose`, postPages.create);
Firstly, I moved my database startup into its own file called db.js, as per this great article that another SO user referred me to:
https://theholmesoffice.com/mongoose-connection-best-practice/
After moving the configurations into db.js I logged the errors as per the article:
// If the connection throws an error
mongoose.connection.on('error',function (err) {
console.log('Mongoose default connection error: ' + err);
});
I had not realised that I wouldn't get error logs on database startup without this.
Make sure to send requests only after the connection to the DB has been established.
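A minimal sketch of what such a db.js might look like, with the connection events logged and the server started only once the connection is up (the URI, database name, and port are illustrative, not taken from the question):
// db.js -- a minimal sketch; the connection URI is illustrative.
const mongoose = require('mongoose');

const uri = 'mongodb://localhost:27017/postsDB';
mongoose.connect(uri, { useNewUrlParser: true });

// Log successful connections
mongoose.connection.on('connected', function () {
    console.log('Mongoose connected to ' + uri);
});

// If the connection throws an error
mongoose.connection.on('error', function (err) {
    console.log('Mongoose default connection error: ' + err);
});

module.exports = mongoose;
Then, in index.js, start listening only after the connection event has fired, for example:
const mongoose = require('./db.js');
mongoose.connection.once('connected', function () {
    app.listen(3000, () => console.log('Server started after DB connection'));
});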

nodejs: does a find query to select n last records need to run asynchronously

I am a newbie in Node.js. I want to fetch the n last records from Mongo and write them to a socket when an (Android) client connects.
I wrote some code and it worked fine when I tested it on a VPS I had, but after moving to a new VPS a problem appeared.
When the first client connects to the socket, it does not get the records. However, if a second client connects, the find query runs again and the first client sees the related emit, but the second client does not!
I added a log after the io.emit command and it runs for every connecting client.
I also added another emit that just sends a test text, and it is delivered to the client as soon as they connect.
My code:
const express = require('express'),
http = require('http'),
app = express(),
server = http.createServer(app),
io = require('socket.io').listen(server);
app.get('/', (req, res) => {
res.send('Chat Server is running on port ......')
});
var mongoose = require('mongoose');
var ChatMessage = require('./chatmessage');
var chatsdb = "mongodb://localhost:27017/chatsdb"
mongoose.connect(chatsdb, {useNewUrlParser: true});
mongoose.connection.on('connected', function () {
console.log('Mongoose connected!')
});
mongoose.connection.on('error', function (err) {
console.log('Mongoose default connection error:' + err);
});
io.on('connection', (socket) => {
let userId = socket.id;
console.log('user ' + userId + ' connected');//it always run
io.emit('connected_message', {"message_text": "ok!" , "user_id":userId});//it always run
ChatMessage.find().sort({created_at:-1}).limit(10).exec(function (err, posts) {
let list_of_messages = [];
for (let i = 0; i < posts.length; i++) {
if (posts[i] != null) {
let message = new ChatMessage({"message_text": posts[i].message_text, "name": posts[i].name , "created_at": posts[i].created_at});
list_of_messages.push(message);
}
}
io.emit('last_fifty_message', list_of_messages);
console.log("list_of_messages:" + list_of_messages); //it always run
});
});
server.listen(50000, () => {
console.log('Node app is running on port 50000')
});
and its ChatMessage class:
var mongoose = require('mongoose');
const Schema = mongoose.Schema;
var ChatMessageSchema = new Schema({
name : { type: String, require:true },
message_text : { type: String , require:true },
created_at : { type:Date , default: Date.now },
message_id : { type: Number , autoIncrement:true }
});
ChatMessageSchema.pre('save',function(next){
this.created_at = new Date();
console.log('this.created_at : '+this.created_at)
next();
});
var ChatMessage = mongoose.model('ChatMessage' , ChatMessageSchema);
module.exports = ChatMessage;
I don't understand. If finding the records is a long process, how is the result logged but not emitted, and why is it emitted to the clients that connected earlier?
Does it need to run asynchronously? Can I use async/await or callbacks or ...?
You are using io.emit; io.emit sends to all connected clients.
For emitting only to the newly connected socket, use socket.emit.
By the way, you should also check whether err is null before using the results.
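A minimal sketch of the connection handler with those two changes applied (per-socket emits and an err check), keeping the rest of the setup from the question; it simply emits the found documents rather than rebuilding ChatMessage instances:
io.on('connection', (socket) => {
    let userId = socket.id;
    console.log('user ' + userId + ' connected');

    // socket.emit targets only the client that just connected,
    // unlike io.emit, which broadcasts to every connected client.
    socket.emit('connected_message', { message_text: 'ok!', user_id: userId });

    ChatMessage.find().sort({ created_at: -1 }).limit(10).exec(function (err, posts) {
        if (err) {
            console.log('find failed: ' + err);
            return;
        }
        socket.emit('last_fifty_message', posts);
    });
});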

Return the specific contents from a field of data in the result-set of a select query

I have a select query that pulls a row of data from a largeObject stored in a PostgreSQL table. The particular piece of data in that field is an HTML file.
I can output the field to the console, but it just appears as 16543 (for 16543 kB).
So, my burning question is how I can return the actual contents (the HTML) so that I can export it as one object and send it to the browser.
I am using Node and Express. Here's my source code so far:
var database = require('../database/postgresDB.js');
var pg = require('pg');
var html = {};
var connectionString = "postgres://dabladmin:dabldem#localhost:5432/dablpatient";
var client = new pg.Client(connectionString);
client.connect();
var query = client.query('SELECT * FROM htmlfiles WHERE id = 1', function(err, result){
console.log(JSON.stringify(result));
console.log(result.rows[0].htmlfile);
html = result.rows[0].htmlfile;
//return result.rows[0].htmlfile;
//console.dir(html);
});
module.exports = html;
This cannot be done directly. You need to export a function which will return the promise.
Following is an idea of how it can be done. Note: The code is not tested.
// htmlfile.model.js
const promise = require('bluebird'); // or any other Promise/A+ compatible library;
const initOptions = {
promiseLib: promise // overriding the default (ES6 Promise);
};
const pgp = require('pg-promise')(initOptions);
// Database connection details;
const cn = {
host: 'localhost', // 'localhost' is the default;
port: 5432, // 5432 is the default;
database: 'myDatabase',
user: 'myUser',
password: 'myPassword'
};
const db = pgp(cn); // database instance;
const getHtml = id => db.oneOrNone('SELECT * FROM htmlfiles WHERE id = $1', id);
module.exports = { getHtml };
Inside some.controller.js:
const html_model = require('./htmlfile.model.js');
html_model.getHtml(1)
.then(data => {
if(data) {
// record found
res.send(data.htmlfile);
} else {
// record not found, do something else
}
})
.catch(error => {
// an error occurred
});
