Why? Error: Can't set headers after they are sent - node.js

I keep getting this error: Error: Can't set headers after they are sent.
I have read other posts but I don't understand them. I don't have any double callbacks or anything like that. What in my code is causing this error?
Error: Can't set headers after they are sent.
at ServerResponse.OutgoingMessage.setHeader (http.js:689:11)
at ServerResponse.header (/root/node_modules/express/lib/response.js:666:10)
at ServerResponse.send (/root/node_modules/express/lib/response.js:146:12)
at fn (/root/node_modules/express/lib/response.js:900:10)
at View.exports.renderFile [as engine] (/root/node_modules/jade/lib/jade.js:330:12)
at View.render (/root/node_modules/express/lib/view.js:93:8)
at EventEmitter.app.render (/root/node_modules/express/lib/application.js:530:10)
at ServerResponse.res.render (/root/node_modules/express/lib/response.js:904:7)
at Query.<anonymous> (/root/tutsplus/server4.js:25:7)
at Query.emit (events.js:98:17)
var express = require('express'),
    app = express();
var mysql = require('mysql');

app.get('/', function(req, res) {
  var connection = mysql.createConnection({
    host: 'localhost',
    user: 'root',
    password: 'xxxx',
    database: 'store'
  });

  var query = connection.query('SELECT * from category');

  query.on('result', function(row) {
    var category = row.category_name;
    res.render('xxxx.jade', {
      category: category
    });
  });
}); // app.get

app.listen(80, function() {
  console.log('we are logged in');
});

As I said in my comment, this issue is nearly always caused by improper handling of asynchronous operations, which lets pieces of the response be sent out of order.
Per the .on()-style example from the mysql driver (quoted further below), you should only end the response once you receive the 'end' event:
query.on('end', function() {
  // all rows have been received
});
I think you are probably calling res.render() more than once because you're calling it in query.on('result', ...) rather than in query.on('end', ...) after all the data has been collected. The first 'result' renders and sends the response; every subsequent 'result' tries to render again and set headers on a response that has already been sent, which is exactly what the error reports.
From the mysql Node.js connector documentation, here's an example:
var query = connection.query('SELECT * FROM posts');
query
  .on('error', function(err) {
    // Handle error, an 'end' event will be emitted after this as well
  })
  .on('fields', function(fields) {
    // the field packets for the rows to follow
  })
  .on('result', function(row) {
    // Pausing the connection is useful if your processing involves I/O
    connection.pause();
    processRow(row, function() {
      connection.resume();
    });
  })
  .on('end', function() {
    // all rows have been received
  });
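Applied to the route in the question, a minimal sketch of the corrected handler might look like the following. It collects the rows into a categories array (a name introduced here for illustration; the template would need to match) and calls res.render() exactly once, in the 'end' handler:

app.get('/', function(req, res) {
  var connection = mysql.createConnection({
    host: 'localhost',
    user: 'root',
    password: 'xxxx',
    database: 'store'
  });

  var categories = [];
  var query = connection.query('SELECT * from category');

  query
    .on('error', function(err) {
      // Send a single error response; an 'end' event will still follow
      if (!res.headersSent) res.status(500).send('Database error');
    })
    .on('result', function(row) {
      // Only collect data here - do not respond per row
      categories.push(row.category_name);
    })
    .on('end', function() {
      connection.end();
      // Render exactly once, after all rows have arrived
      if (!res.headersSent) {
        res.render('xxxx.jade', { categories: categories });
      }
    });
});

The res.headersSent checks are just a defensive way to guarantee the response is only written once.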

Related

Fastify plugin dependency - Registering MySQL before routes

I'm new to NodeJS and Fastify and I'm fairly sure I'm making a basic mistake here but I can't see what it is.
I have separated out my MySQL connection and routes into separate plugins. It appears that the MySQL plugin is not registering in time, leading to the routes not working.
I was under the impression that registering plugins was done asynchronously (loading one plugin at a time), but it can't seem to find "fastify.mysql.query".
Error:
"Cannot read properties of undefined (reading 'query')","stack":"TypeError: Cannot read properties of undefined (reading 'query')\n at Object.<anonymous> (/Users/dally/Projects/NodeJS/boodil-payments-api/routes/payments.js:4:23)\n at preHandlerCallback (/Users/dally/Projects/NodeJS/boodil-payments-api/node_modules/fastify/lib/handleRequest.js:126:28)\n at preValidationCallback (/Users/dally/Projects/NodeJS/boodil-payments-api/node_modules/fastify/lib/handleRequest.js:110:5)\n at handler (/Users/dally/Projects/NodeJS/boodil-payments-api/node_modules/fastify/lib/handleRequest.js:74:7)\n at handleRequest (/Users/dally/Projects/NodeJS/boodil-payments-api/node_modules/fastify/lib/handleRequest.js:22:5)\n at runPreParsing (/Users/dally/Projects/NodeJS/boodil-payments-api/node_modules/fastify/lib/route.js:487:5)\n at Object.routeHandler [as handler] (/Users/dally/Projects/NodeJS/boodil-payments-api/node_modules/fastify/lib/route.js:434:7)\n at Router.lookup (/Users/dally/Projects/NodeJS/boodil-payments-api/node_modules/find-my-way/index.js:368:14)\n at Server.emit (node:events:527:28)\n at parserOnIncoming (node:_http_server:956:12)"},"msg":"Cannot read properties of undefined (reading 'query')"}
Server.js
const fastify = require('fastify')({ logger: true })

fastify.register(require('./config/db'))
fastify.register(require('./routes/payments'))

const PORT = 2000

const start = async () => {
  try {
    await fastify.listen({ port: PORT })
  }
  catch (e) {
    fastify.log.error(e)
    process.exit(1)
  }
}

start()
DB.js
module.exports = function (fastify, options, done) {
  fastify.register(require('@fastify/mysql'), {
    connectionString: 'mysql://root:password@localhost/boodil'
  })
  done()
}
payment.js
function paymentRoutes(fastify, opts, done) {
fastify.get('/get-transactions', (req, reply) => {
fastify.mysql.query(
'SELECT * FROM transactions',
function onResult(err, result) {
reply.send(err || result)
}
)
})
fastify.get('/get-transaction:id', (req, reply) => {
fastify.mysql.query(
'SELECT * FROM transactions where id = ?', [req.params.id],
function onResult(err, result) {
reply.send(err || result)
}
)
})
done()
}
module.exports = paymentRoutes
As said in comments, you need to use the fastify-plugin wrapper in your DB.js file.
To understand this error, you need to get comfortable with Fastify's encapsulation contexts.
It is worth reading these answers:
https://stackoverflow.com/a/61054534/3309466
https://stackoverflow.com/a/73586266/3309466
In practice, your database connection was registered into a context that paymentRoutes cannot reach.
"I'm coming from a PHP/Laravel background where this is quite standard"
What Fastify refuses to do is create one global context where everything you need is shared. Of course, you can get that behaviour, but it is not the default. You need to declare that the exported plugin provides something that other contexts can use.
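For illustration, a minimal sketch of DB.js wrapped with fastify-plugin (keeping the same @fastify/mysql options as in the question) could look like this:

// config/db.js
const fastifyPlugin = require('fastify-plugin')

async function dbConnector (fastify, options) {
  fastify.register(require('@fastify/mysql'), {
    connectionString: 'mysql://root:password@localhost/boodil'
  })
}

// fastify-plugin turns off encapsulation for this plugin, so the
// fastify.mysql decorator becomes visible to sibling plugins
// such as paymentRoutes.
module.exports = fastifyPlugin(dbConnector)

With the wrapper in place, the decorator registered inside dbConnector is exposed to the parent context instead of staying isolated in its own.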

(node:23977) UnhandledPromiseRejectionWarning: Error: Missing credentials for "PLAIN"

I'm trying to send emails from the site to a Mail.ru mailbox, but the following error appears:
(node:25258) UnhandledPromiseRejectionWarning: Error: Missing credentials for "PLAIN"
at SMTPConnection._formatError (/home/user/server/node_modules/nodemailer/lib/smtp-connection/index.js:784:19)
at SMTPConnection.login (/home/user/server/node_modules/nodemailer/lib/smtp-connection/index.js:448:38)
at /home/user/server/node_modules/nodemailer/lib/smtp-transport/index.js:271:32
at SMTPConnection.<anonymous> (/home/user/server/node_modules/nodemailer/lib/smtp-connection/index.js:215:17)
at Object.onceWrapper (events.js:421:28)
at SMTPConnection.emit (events.js:315:20)
at SMTPConnection._actionEHLO (/home/user/server/node_modules/nodemailer/lib/smtp-connection/index.js:1313:14)
at SMTPConnection._processResponse (/home/user/server/node_modules/nodemailer/lib/smtp-connection/index.js:942:20)
at SMTPConnection._onData (/home/user/server/node_modules/nodemailer/lib/smtp-connection/index.js:749:14)
at TLSSocket.SMTPConnection._onSocketData (/home/user/server/node_modules/nodemailer/lib/smtp-connection/index.js:195:44)
(node:25258) UnhandledPromiseRejectionWarning: Unhandled promise rejection. This error originated either by throwing inside of an async function without a catch block, or by rejecting a promise which was not handled with .catch(). To terminate the node process on unhandled promise rejection, use the CLI flag `--unhandled-rejections=strict` (see https://nodejs.org/api/cli.html#cli_unhandled_rejections_mode). (rejection id: 1)
(node:25258) [DEP0018] DeprecationWarning: Unhandled promise rejections are deprecated. In the future, promise rejections that are not handled will terminate the Node.js process with a non-zero exit code.
And here is the backend code itself:
const express = require('express');
const nodemailer = require('nodemailer');
const Parser = require('body-parser');
const sqlite3 = require('sqlite3').verbose();

const server = express();

let path_services = ( __dirname + '/public/static/img/services/' );
let path_html = ( __dirname + '/public/index.html' );
let path_font = ( __dirname + '/public/static/font/' );
let path_css = ( __dirname + '/public/static/css/' );
let path_img = ( __dirname + '/public/static/img/' );
let path_js = ( __dirname + '/public/static/js/' );
let path_db = ( __dirname + '/Main.db' );

const Body_Parser = Parser.urlencoded({extended: false});

server.get('/', Body_Parser, function(request, response) {
  response.sendFile(path_html);
});

server.post('/', Body_Parser, function(request, response) {
  let db = new sqlite3.Database(path_db, sqlite3.OPEN_READWRITE, (err) => {
    if(err) {
      console.error(err.message);
    }
    console.log('Connected to Database!');
  });

  db.run('INSERT INTO Client(firstname, lastname, email, phone, message) VALUES($firstname, $lastname, $email, $phone, $message)', {
    $firstname: request.body.ghostName,
    $lastname: request.body.ghostSurname,
    $email: request.body.ghostEmail,
    $phone: request.body.ghostPhone,
    $message: request.body.ghostMessage
  }, function(err) {
    if(err) {
      console.log(err.message);
    }
    console.log('Good!');
    response.end('Ok!');
  });

  db.close();

  let testEmailAccount = nodemailer.createTestAccount();

  let transporter = nodemailer.createTransport({
    service: 'email',
    host: 'smtp.ethereal.email',
    port: 587,
    secure: false,
    auth: {
      type: "login", // default
      user: testEmailAccount.user,
      pass: testEmailAccount.pass
    }
  });

  let result = transporter.sendMail({
    from: '"Node js" <nodejs@example.com>',
    to: "chrome_777111777@mail.ru",
    subject: "Message from Node js",
    text: "This message was sent from Node js server.",
    html: "This <i>message</i> was sent from <strong>Node js</strong> server."
  });

  console.log(result);
});

server.use(express.static(path_services));
server.use(express.static(path_font));
server.use(express.static(path_css));
server.use(express.static(path_img));
server.use(express.static(path_js));

server.listen(8000);
I am asking for help because I have already visited a huge number of sites, starting with the official documentation and ending with a lot of unfamiliar forums, but everywhere the examples cover sending mail via Gmail; sending to an arbitrary mailbox is only described in the documentation and on a few sites that copy it. I will be very grateful for any help!
nodemailer.createTestAccount is an async function, which means you just have to put the await keyword in front of it, like this:
let testEmailAccount = await nodemailer.createTestAccount();
Otherwise testEmailAccount is a promise, and testEmailAccount.user and testEmailAccount.pass end up undefined.
In your case you also have to make your server.post callback an async function:
server.post('/', Body_Parser, async function(request, response) {
  // ...
});
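Putting that together, a rough sketch of the mail-sending part of the handler (keeping the Ethereal test account from the question and omitting the SQLite insert for brevity; transporter.sendMail also returns a promise, so awaiting it inside a try/catch avoids the unhandled rejection) might look like:

server.post('/', Body_Parser, async function(request, response) {
  try {
    // Wait for the test account credentials before building the transport
    let testEmailAccount = await nodemailer.createTestAccount();

    let transporter = nodemailer.createTransport({
      host: 'smtp.ethereal.email',
      port: 587,
      secure: false,
      auth: {
        user: testEmailAccount.user,
        pass: testEmailAccount.pass
      }
    });

    // sendMail returns a promise as well, so await it
    let result = await transporter.sendMail({
      from: '"Node js" <nodejs@example.com>',
      to: "chrome_777111777@mail.ru",
      subject: "Message from Node js",
      text: "This message was sent from Node js server."
    });

    console.log(result);
    response.end('Ok!');
  } catch (err) {
    console.error(err);
    response.status(500).end('Mail error');
  }
});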

Saving data to Postgres from AWS Lambda

I'm building a lambda function that is supposed to save game feedback, like a performance grade, into my Postgres database, which is in AWS RDS.
I'm using Node.js with TypeScript and the function is kind of working, but in a strange way.
I made an API Gateway so I can POST data to the URL for the lambda to process and save. The thing is, when I POST the data the function seems to process it until it reaches a max limit of connected clients, and then it seems to lose the other clients' data.
Another problem is that every time I POST data I get a response saying that there was an Internal Server Error, with an 'X-Cache: Error from cloudfront' header. For a GET request I figured out that it was giving me this response because the format of the response was incorrect, but in this case I fixed the response format and still get this problem...
Sometimes I get a timeout response.
My function's code:
import { APIGatewayEvent, Callback, Context, Handler } from "aws-lambda";
import { QueryConfig, Client, Pool, PoolConfig } from "pg";

export const insert: Handler = async (
  event: APIGatewayEvent,
  context: Context,
  cb: Callback
) => {
  // context.callbackWaitsForEmptyEventLoop = false;
  const config: PoolConfig = {
    user: process.env.PG_USER,
    host: process.env.PG_HOST,
    database: process.env.PG_DB,
    password: process.env.PG_PASS,
    port: parseInt(process.env.PG_PORT),
    idleTimeoutMillis: 0,
    max: 10000
  };
  const pool = new Pool(config);

  let postdata = event.body || event;
  console.log("POST DATA:", postdata);
  if (typeof postdata == "string") {
    postdata = JSON.parse(postdata);
  }

  let query: QueryConfig = <QueryConfig>{
    name: "get_all_questions",
    text:
      "INSERT INTO gamefeedback (gameid, userid, presenterstars, gamestars) VALUES ($1, $2, $3, $4);",
    values: [
      parseInt(postdata["game_id"]),
      postdata["user_id"],
      parseInt(postdata["presenter_stars"]),
      parseInt(postdata["game_stars"])
    ]
  };

  console.log("Before Connect");
  let con = await pool.connect();
  let res = await con.query(query);
  console.log("res.rowCount:", res.rowCount);
  if (res.rowCount != 1) {
    cb(new Error("Error saving the feedback."), {
      statusCode: 400,
      body: JSON.stringify({
        message: "Error saving data!"
      })
    });
  }
  cb(null, {
    statusCode: 200,
    body: JSON.stringify({
      message: "Saved successfully!"
    })
  });
  console.log("The End");
};
Then the CloudWatch log for the error about the max number of connected clients looks like this:
2018-08-03T15:56:04.326Z b6307573-9735-11e8-a541-950f760c0aa5 (node:1) UnhandledPromiseRejectionWarning: error: sorry, too many clients already
at u.parseE (/var/task/webpack:/node_modules/pg/lib/connection.js:553:1)
at u.parseMessage (/var/task/webpack:/node_modules/pg/lib/connection.js:378:1)
at Socket.<anonymous> (/var/task/webpack:/node_modules/pg/lib/connection.js:119:1)
at emitOne (events.js:116:13)
at Socket.emit (events.js:211:7)
at addChunk (_stream_readable.js:263:12)
at readableAddChunk (_stream_readable.js:250:11)
at Socket.Readable.push (_stream_readable.js:208:10)
at TCP.onread (net.js:607:20)
Can any of you guys help me with this strange problem?
Thanks
Well, for one thing, you need to move the pool creation above the handler, like so:
const config: PoolConfig = {
  user: process.env.PG_USER,
  ...
};

const pool = new Pool(config);

export const insert: Handler = async (
  event: APIGatewayEvent,
  context: Context,
  cb: Callback
) => {
  // ...etc
The way you have it, you are creating a new pool on every invocation. If you create the pool outside the handler, Lambda has a chance to reuse it across invocations.
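Sketched against the handler in the question, that change could look roughly like this (the small max value and the PG_PORT fallback are illustrative choices, not requirements; the client is also released back to the pool, which the original code never did):

import { APIGatewayEvent, Callback, Context, Handler } from "aws-lambda";
import { Pool, PoolConfig, QueryConfig } from "pg";

// Created once per execution environment, reused across warm invocations
const config: PoolConfig = {
  user: process.env.PG_USER,
  host: process.env.PG_HOST,
  database: process.env.PG_DB,
  password: process.env.PG_PASS,
  port: parseInt(process.env.PG_PORT || "5432", 10),
  max: 2 // illustrative: keep this small, RDS connections are a shared resource
};
const pool = new Pool(config);

export const insert: Handler = async (
  event: APIGatewayEvent,
  context: Context,
  cb: Callback
) => {
  let postdata = event.body || event;
  if (typeof postdata == "string") {
    postdata = JSON.parse(postdata);
  }

  const query: QueryConfig = {
    text:
      "INSERT INTO gamefeedback (gameid, userid, presenterstars, gamestars) VALUES ($1, $2, $3, $4);",
    values: [
      parseInt(postdata["game_id"]),
      postdata["user_id"],
      parseInt(postdata["presenter_stars"]),
      parseInt(postdata["game_stars"])
    ]
  };

  const client = await pool.connect();
  try {
    const res = await client.query(query);
    cb(null, {
      statusCode: res.rowCount === 1 ? 200 : 400,
      body: JSON.stringify({
        message: res.rowCount === 1 ? "Saved successfully!" : "Error saving data!"
      })
    });
  } catch (err) {
    cb(err as Error);
  } finally {
    client.release(); // always return the client to the pool
  }
};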

net.Stream is not a constructor - Node Postgres

I'm trying to connect a Node.js app with a PostgreSQL server. It seems that no matter what I use, I end up with the same error:
bundle.js:16177 ERROR: TypeError: net.Stream is not a constructor
at new Connection (bundle.js:10133)
at new Client (bundle.js:9704)
at Object.create (bundle.js:11308)
at Pool._createResource (bundle.js:510)
at Pool.dispense [as _dispense] (bundle.js:498)
at Pool.acquire (bundle.js:573)
at Pool.pool.connect (bundle.js:11359)
at PG.connect (bundle.js:10876)
at bundle.js:1642
At first I was declaring a new pg.Client() like the example in the documentation here, but I got the above error and then discovered that might be a bad idea according to this Stack Overflow post.
I tried using pg.connect():
var pg = require('pg'); // postgresql dependency
var connectionString = "postgres://postgres:thisissuchagoodpassword@PostgreSQL/localhost:5432/Milestone1DB";

console.log("Initiating...");
//var connectionString = "postgres://postgres:thisissuchagoodpassword@PostgreSQL9.6/localhost:5432/Milestone1DB";
//var client = new pg.Client();

// connect to the database
console.log("Attempting to connect to the database");
pg.connect(function (err, client, done)
{
  if (err)
  {
    console.log("Error connecting to the database.");
    throw err;
  }
  client.query("SELECT DISTINCT state FROM business ORDER BY state", function (err, result)
  {
    if (err)
    {
      console.log("Query resulted in an error.");
      throw err;
    }
    console.log(result.rows[0]);
    client.end(function (err)
    {
      if (err)
      {
        console.log("Error disconnecting from the database.");
        throw err;
      }
    });
  });
});
Here is the pg-promise code that I tried:
var pgp = require('pg-promise');
var cn = {
host: 'localhost', // server name or IP address;
port: 5432,
database: 'Milestone1DB',
user: 'postgres',
password: 'thisissuchagoodpassword'
};
var db = pgp(cn); // database instance;
db.any("select distict state from business order by state;")
.then(data => {
console.log("DATA:", data);
})
.catch(error => {
console.log("ERROR:", error);
});
I must be missing something, but I don't know where to look. Thank you to anyone who can help me figure out what this error means.
Make sure you are not crossing a context boundary that corrupts the net prototype chain and strips away methods like Stream(). I ran into a similar unhandled promise exception with Node 7.5 and pg-live-select. It was intermittent, however, because of the way the net reference was being passed around. I ended up using the V8 inspector and putting a 'debugger' statement directly above line 13 in connection.js to catch the corruption.
node_modules/lib/connection.js:13
this.stream = config.stream || new net.Stream();
                               ^
TypeError: net.Stream is not a constructor
at new Connection (node_modules/pg-live-select/node_modules/pg/lib/connection.js:13:34)
at new Client (node_modules/pg-live-select/node_modules/pg/lib/client.js:26:37)
at Object.create (node_modules/pg-live-select/node_modules/pg/lib/pool.js:27:24)
at Pool._createResource (node_modules/generic-pool/lib/generic-pool.js:325:17)
at Pool.dispense [as _dispense] (node_modules/generic-pool/lib/generic-pool.js:313:12)
at Pool.acquire (node_modules/generic-pool/lib/generic-pool.js:388:8)
at Pool.pool.connect (node_modules/pg-live-select/node_modules/pg/lib/pool.js:78:14)
at PG.connect (node_modules/pg-live-select/node_modules/pg/lib/index.js:49:8)
at LivePg._updateQuery (node_modules/pg-live-select/index.js:295:6)
at node_modules/pg-live-select/index.js:160:14
at Array.forEach (native)
at Timeout.performNextUpdate [as _onTimeout] (node_modules/pg-live-select/index.js:159:23)
at ontimeout (timers.js:365:14)
at tryOnTimeout (timers.js:237:5)
at Timer.listOnTimeout (timers.js:207:5)
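As a rough way to check whether the net module you're getting has been shimmed or corrupted, you could run a quick sanity script in a plain Node process alongside the failing code (check-net.js is a hypothetical file name, not part of the question):

// check-net.js - sanity-check the net module in the current runtime context
const net = require('net');

console.log(typeof net.Stream);          // 'function' in a real Node process
console.log(net.Stream === net.Socket);  // in core Node, Stream is a legacy alias for Socket
console.log(new net.Stream() instanceof net.Socket); // should print true

If any of these come back undefined or false, the net object at that point is not Node's own module, which usually means a bundler shim or a corrupted reference is being passed into pg.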

Error using Redis Multi with nodejs

I am using Redis and querying it from Node.js, using the redis module.
When I exec a client.multi() and the Redis server is down, the callback doesn't receive the error and the Node.js app terminates.
This is the error:
/Users/a/db/node_modules/redis/index.js:151
throw callback_err;
^
TypeError: Cannot read property 'length' of undefined
at Command.callback (/Users/a/db/node_modules/redis/index.js:1098:35)
at RedisClient.flush_and_error (/Users/a/db/node_modules/redis/index.js:148:29)
at RedisClient.on_error (/Users/a/db/node_modules/redis/index.js:184:10)
at Socket.<anonymous> (/Users/a/db/node_modules/redis/index.js:95:14)
at Socket.EventEmitter.emit (events.js:95:17)
at net.js:441:14
at process._tickCallback (node.js:415:13)
This is my code.
Constructor class:
var redis = require('redis');
var client;

function Redis() {
  client = redis.createClient();
  client.on("error", function (err) {
    console.log("Error " + err);
  });
}

Redis.prototype.multi = function (commands, callback) {
  var err = null;
  client.multi(commands).exec(function (error, res) {
    if (error) {
      process.nextTick(function () {
        callback(error, null);
      });
    } else {
      process.nextTick(function () {
        callback(null, res);
      });
    }
  });
};
FYI, I ran across this in an old lib that depended on an old version of node_redis.
This issue was a bug and was fixed in v0.9.1 (November 23, 2013): https://github.com/mranney/node_redis/pull/457
I think people are still reaching this question (this may not answer it directly, but I assume people land here because multi.exec() returns true and the event loop does not wait for its response).
After the fixes that went into node-redis, it is possible to wrap the result of exec in a Promise, and then you can be sure the result will include the replies from the multi.
So, you can add some redis commands to the multi:
await multi.exists(key);
await multi.sadd(key2,member);
And then, to get the result, do something like:
return new Promise((resolve, reject) => {
  multi.exec((err, replies) => {
    if (err) {
      reject(err);
    }
    return resolve(replies);
  });
});
Otherwise, if you just do const reply = await multi.exec(); it will simply return true, not the replies.
Important to mention: this refers to 'async-redis' and 'node-redis'.
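Putting the pieces together, a rough sketch (assuming a node-redis v3-style client and hypothetical key, key2, and member values) could look like this:

function execMulti(multi) {
  // Wrap the callback-style exec() in a Promise so the replies can be awaited
  return new Promise((resolve, reject) => {
    multi.exec((err, replies) => {
      if (err) {
        return reject(err);
      }
      resolve(replies);
    });
  });
}

async function run(client, key, key2, member) {
  const multi = client.multi();

  // Queue the commands on the multi (they are sent together on exec)
  multi.exists(key);
  multi.sadd(key2, member);

  const replies = await execMulti(multi);
  console.log(replies); // one reply per queued command, e.g. [1, 0]
}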
