Nodejs - Axios - Timeouts lead to memory leak - node.js

I'm running an app which basically:
Receives a request through Express.js
Send multiples requests to various endpoints
Once those endpoints responses, we compute a response and send it to the client.
I'm using Axios Instances per endpoints to send the requests.
const axios = require('axios');
const Agent = require('agentkeepalive');
const { HttpsAgent } = Agent;
const httpKeepAliveAgent = new Agent({
maxSockets: 100,
maxFreeSockets: 10,
timeout: 60000, // active socket keepalive for 60 seconds
freeSocketTimeout: 30000, // free socket keepalive for 30 seconds
});
const httpsKeepAliveAgent = new HttpsAgent({
maxSockets: 100,
maxFreeSockets: 10,
timeout: 60000, // active socket keepalive for 60 seconds
freeSocketTimeout: 30000, // free socket keepalive for 30 seconds
});
const createAxiosInstance = () => axios.create({
httpAgent: httpKeepAliveAgent,
httpsAgent: httpsKeepAliveAgent,
maxRedirects: 10,
});
I send requests to third party endpoint using the following
const fakeServer = require('../test/fake-server');
const logger = require('../utils/logger');
const { LOG_VERBOSE } = process.env;
// let promiseCount = 0;
module.exports = async (axiosInstance, ssp, payload, endpoint, method, timeout, headers) => {
const cmd = process.env.NODE_ENV === 'test' ? fakeServer : axiosInstance;
const start = Date.now();
const config = {
ssp,
url: endpoint,
method,
timeout,
headers: {
'Content-Type': 'application/json; charset=utf-8;',
Accept: 'application/json',
},
data: payload,
};
if (headers !== undefined && typeof headers === 'object') {
// eslint-disable-next-line no-return-assign
Object.keys(headers).forEach((key) => config.headers[key] = headers[key]);
}
try {
const response = await cmd(config);
return {
ssp,
uri: config.url,
requestbody: payload,
requestheaders: config.headers,
responsebody: response.data,
status: response.status,
responsetimemillis: Date.now() - start,
};
} catch (error) {
if (LOG_VERBOSE === 'true') logger.error(`Error on ${ssp} call: ${error.message}`);
let responsebody;
let status;
if (error.response === undefined) {
responsebody = error.code;
status = error.code;
} else {
responsebody = error.response.data ? error.response.data : error.message;
status = error.response.status;
}
return {
ssp,
uri: config.url,
requestbody: payload,
requestheaders: config.header,
responsebody,
status,
responsetimemillis: Date.now() - start,
};
}
};
The issues is that when I get timeouts or 400 error, there might happen some memory leaks, I can't get a successful linked connection, and so at the end the app crashes...
Any ideas?

https://github.com/axios/axios/issues/2783
maxRedirects may be causing this issue, try setting lower redirects or 0, as the resolved issue suggests.

Related

FaunaDB returns empty array (FaunaDB + Netlify + VueJS)

My code is based on the repository - https://github.com/ttntm/recept0r-ts
Code from "\functions\read-all.js":
const faunadb = require('faunadb');
const fnHeaders = require('./_shared/headers.js');
exports.handler = (event, context) => {
const client = new faunadb.Client({
secret: process.env.FAUNA_SECRET,
domain: 'db.fauna.com',
scheme: 'https',
port: '443'
});
const q = faunadb.query;
const headers = { ...fnHeaders };
const origin = event.headers.Origin || event.headers.origin;
headers['Access-Control-Allow-Origin'] = origin ? origin : '*';
return client.query(q.Paginate(q.Match(q.Index('all_users'), false), { size: 500 }))
.then((response) => {
const listRefs = response.data;
const getListDataQuery = listRefs.map(ref => q.Get(ref)); // create new query out of list refs, then query the refs
return client.query(getListDataQuery).then((records) => {
return { statusCode: 200, headers: headers, body: JSON.stringify(records) }
})
})
.catch((error) => {
return { statusCode: 400, headers: headers, body: JSON.stringify(error) }
});
}
Code from "\src\store\modules\data.js":
async readAll({ commit, dispatch, rootGetters })
{
const fn = rootGetters['app/functions'];
const request = await fetch(fn.readAll, { method: 'GET' });
const response = await request.json();
if (response.length > 0) {
commit('SET_ALL_RECIPES', response);
commit('SET_LAST_UPDATED', new Date); }
else {
dispatch('app/sendToastMessage', { text: 'Error loading recipes. Please try again later.', type: 'error' }, { root: true });
return 'error';
}
}
Everything seems to be set. For example, this code works:
client.query(q.CreateCollection({ name: 'someCollection' }))
But can't read any data.
If launch application by "netlify dev" (localhost) - "read-all" returns empty array ("[]").
If launch application by "network" - "read-all" returns default "index.html".
I have no idea what's wrong. Maybe someone give advice...
I found a similar question - Local Netlify function server gives strange response instead of FaunaDB data
Some answer:
"In my experience, one of the most common reasons for this error is a routing problem, which is triggering a 404 response route serving HTML instead of your expected function handler."
This code works:
return client.query(q.Paginate(q.Documents(q.Collection('customers')), { size: 500 }))
.then((response) => {
const listRefs = response.data;
const getListDataQuery = listRefs.map(ref => q.Get(ref)); // create new query out of list refs, then query the refs
return client.query(getListDataQuery).then((records) => {
return { statusCode: 200, headers: headers, body: JSON.stringify(records) }
});
})
.catch((error) => {
return { statusCode: 400, headers: headers, body: JSON.stringify(error) }
});

Memory Leak with axios-module from Nuxt

I have a memory leak using #nuxtjs/axios. I don't really know if it's my code or the plugin.
My project requires 2 axios instances to run.
One to get an "anonymous token" to be allowed to access the API.
And a second one to use that token and make my API calls.
const createDefaultClient = (agent, $axios, $appInsights, inject) => {
const defaultClient = $axios.create({
withCredentials: true,
httpsAgent: agent
})
defaultClient.setBaseURL(
process.server ? 'http://localhost:3000' : window.location.origin
)
defaultClient.onResponseError(error => {
logError(error, $appInsights)
return Promise.reject(error)
})
inject('defaultClient', defaultClient)
return defaultClient
}
const createApiClient = (
agent,
$axios,
$cookies,
$appInsights,
defaultClient,
inject,
request
) => {
const apiClient = $axios.create({
withCredentials: true,
httpsAgent: agent
})
apiClient.setBaseURL(process.env.NUXT_ENV_BASE_API)
apiClient.onRequest(config => {
const cookieToken = $cookies.get(accessTokenCookieName)
if (cookieToken) {
config.headers.common.Authorization = `Bearer ${cookieToken}`
}
debug(`${config.__isRetryRequest ? 'Retry' : 'Request'}: ${config.url}`)
})
apiClient.onResponseError(async error => {
const originalRequest = error.config
debug(`Error ${get(error, 'response.status')}: ${error.config.url}`)
// if we get error 401 (token expiration) we will refresh the token and retry to access API
if (error.config && error.response && error.response.status === 401) {
originalRequest.__isRetryRequest = true
const refreshToken = $cookies.get(refreshTokenCookieName)
let fetchAnonymousToken = true
let response = null
// if we had a cookie try to refresh it
if (refreshToken) {
response = await defaultClient.$post(`/forms/refresh`, {
refreshToken
})
if (!response) throw new Error('Auth failure')
if (process.client) {
createAuthCookies($cookies, response, request)
}
fetchAnonymousToken = false
}
// else fetch an anonymous cookie
if (fetchAnonymousToken) {
response = await defaultClient.$get(`/forms/anonymous`)
if (!response) throw new Error('Auth failure')
if (process.client) {
createAuthCookies($cookies, response, request)
}
}
// resend API request with the new valid token
originalRequest.headers.Authorization = `Bearer ${get(
response,
'access_token'
)}`
return apiClient(originalRequest)
} else {
logError(error, $appInsights)
return Promise.reject(error)
}
})
inject('apiClient', apiClient)
}
export default function({ $axios, app, req }, inject) {
const { $cookies} = app
const agent = new https.Agent({
rejectUnauthorized: process.env.NODE_ENV !== 'development'
})
const defaultClient = createDefaultClient(agent, $axios, inject)
createApiClient(
agent,
$axios,
$cookies,
defaultClient,
inject,
req
)
}
Basically I've followed the doc here : https://axios.nuxtjs.org/extend
I've commented everything in my projet and without the API calls the memory get garbage collected.
Here the memory before the charge
Here after the charge
After a siege, we can clearly see that the memory won't go down :/
Please help me i'm desesperate..

Reuse TCP connection with node-fetch in node.js

I am using this function to call an external API
const fetch = require('node-fetch');
fetchdata= async function (result = {}) {
var start_time = new Date().getTime();
let response = await fetch('{API endpoint}', {
method: 'post',
body: JSON.stringify(result),
headers: { 'Content-Type': 'application/json' },
keepalive: true
});
console.log(response)
var time = { 'Respone time': + (new Date().getTime() - start_time) + 'ms' };
console.log(time)
return [response.json(), time];
}
The problem is that i am not sure that node.js is reusing the TCP connection to the API every time i use this function, eventhough i defined the keepalive property.
Reusing the TCP connection can significantly improve response time
Any suggestions will be welcomed.
As documented in https://github.com/node-fetch/node-fetch#custom-agent
const fetch = require('node-fetch');
const http = require('http');
const https = require('https');
const httpAgent = new http.Agent({ keepAlive: true });
const httpsAgent = new https.Agent({ keepAlive: true });
const agent = (_parsedURL) => _parsedURL.protocol == 'http:' ? httpAgent : httpsAgent;
const fetchdata = async function (result = {}) {
var start_time = new Date().getTime();
let response = await fetch('{API endpoint}', {
method: 'post',
body: JSON.stringify(result),
headers: { 'Content-Type': 'application/json' },
agent
});
console.log(response)
var time = { 'Respone time': + (new Date().getTime() - start_time) + 'ms' };
console.log(time)
return [response.json(), time];
}
Here's a wrapper around node-fetch based on their documentation:
import nodeFetch, { RequestInfo, RequestInit, Response } from "node-fetch";
import http from "http";
import https from "https";
const httpAgent = new http.Agent({
keepAlive: true
});
const httpsAgent = new https.Agent({
keepAlive: true
});
export const fetch = (url: RequestInfo, options: RequestInit = {}): Promise<Response> => {
return nodeFetch(url, {
agent: (parsedURL) => {
if (parsedURL.protocol === "http:") {
return httpAgent;
} else {
return httpsAgent;
}
},
...options
});
};
Keep-alive is not enabled for the default used agent and is not currently implemented into node-fetch directly, but you can easily specify a custom-agent where you enable the keep-alive option:
const keepAliveAgent = new http.Agent({
keepAlive: true
});
fetch('{API endpoint}', {
...
agent: keepAliveAgent
});
here is a wrapper for node-fetch to add a keepAlive option, based on Ilan Frumer's answer
// fetch: add option keepAlive with default true
const fetch = (function getFetchWithKeepAlive() {
const node_fetch = require('node-fetch');
const http = require('http');
const https = require('https');
const httpAgent = new http.Agent({ keepAlive: true });
const httpsAgent = new https.Agent({ keepAlive: true });
return async function (url, userOptions) {
const options = { keepAlive: true };
Object.assign(options, userOptions);
if (options.keepAlive == true)
options.agent = (parsedUrl => parsedUrl.protocol == 'http:' ? httpAgent : httpsAgent);
delete options.keepAlive;
return await node_fetch(url, options);
}
})();
const response = await fetch('https://github.com/');
const response = await fetch('https://github.com/', { keepAlive: false });

Graphql Connect Error - Only getting Query or Subscription or both

We are encountering an error in the setup of our application to the graphql connection. Our main need is Subscriptions, currently our code handles these fine. But our queries respond with "Leads not found in query_root"
When we modify the positioning of our connection code, the query now will work but subscriptions stop connecting.
const scheme = proto => {
return window.location.protocol === 'https:' ? `${proto}s` : proto;
};
const wsurl = `${scheme('ws')}://${HASURA_GRAPHQL_ENGINE_HOSTNAME}/v1/graphql`;
const httpurl = `${scheme('http')}://${HASURA_GRAPHQL_ENGINE_HOSTNAME}/v1/graphql`;
// Create a WebSocket link:
const wsLink = new WebSocketLink({
uri: wsurl,
options: {
reconnect: true,
connectionParams: {
headers: {
'x-hasura-access-key': `${HASURA_ACCESS_KEY}`
}
}
}
});
const httpLink = new HttpLink({
uri: httpurl,
options: {
connectionParams: {
headers: {
'x-hasura-access-key': `${HASURA_ACCESS_KEY}`
}
}
}
});
const link = split(
// split based on operation type
({ query }) => {
const { kind, operation } = getMainDefinition(query);
return kind === 'OperationDefinition' && operation === 'subscription';
},
wsLink,
httpLink
);
const client = new ApolloClient({
link,
cache: new InMemoryCache()
});
export default client;
In the link const, if we put wsLink below httpLink then wsLink stops and httpLink works.

node-fetch timeout issue/ express timeout/ lambda timeout or sth else?

below code works fine when running locally, but "getAccessToken ()" does not work as expected when running inside an aws lambda function, when doing "POST" to /webhook endpoint. I am using node 8.10, "aws-serverless-express": "^3.3.6", "express": "^4.16.4" and "node-fetch": "^2.5.0", basically it print the correct jwt token but node-fetch does not return anything, sample log.
START RequestId: c8efba59-1869-4eaa-b9d8-aa15a7507d52 Version: $LATEST
2019-05-27T19:55:32.328Z c8efba59-1869-4eaa-b9d8-aa15a7507d52 start getExecution
2019-05-27T19:55:32.328Z c8efba59-1869-4eaa-b9d8-aa15a7507d52 exectution_url:
2019-05-27T19:55:32.328Z c8efba59-1869-4eaa-b9d8-aa15a7507d52 https://cloudmanager.adobe.io/somevalidurl
2019-05-27T19:55:32.328Z c8efba59-1869-4eaa-b9d8-aa15a7507d52 start getAccessToken
END RequestId: c8efba59-1869-4eaa-b9d8-aa15a7507d52
REPORT RequestId: c8efba59-1869-4eaa-b9d8-aa15a7507d52 Duration: 6657.00 ms Billed Duration: 6700 ms Memory Size: 128 MB Max Memory Used: 37 MB
I made sure lambda timeout is 30 seconds, tried to disable "node-fetch" timeout by setting it to 0 and , used a middleware for all the routes "app.use(timeout("30000"))" also for that specific webhook request timeout. (I receive the 200 pong response immediately but getexectuion async function does not work properly)
const express = require('express')
const bodyParser = require('body-parser')
const crypto = require('crypto')
const jsrsasign = require('jsrsasign')
const fetch = require('node-fetch')
const timeout = require('connect-timeout')
const URL = require('url').URL
const URLSearchParams = require('url').URLSearchParams
//require('dotenv').config()
const app = express()
async function getAccessToken () {
console.log("start getAccessToken")
const EXPIRATION = 60 * 60 // 1 hour
const header = {
'alg': 'RS256',
'typ': 'JWT'
}
const payload = {
'exp': Math.round(new Date().getTime() / 1000) + EXPIRATION,
'iss': process.env.ORGANIZATION_ID,
'sub': process.env.TECHNICAL_ACCOUNT_ID,
'aud': `https://ims-na1.adobelogin.com/c/${process.env.API_KEY}`,
'https://ims-na1.adobelogin.com/s/ent_cloudmgr_sdk': true
}
const jwtToken = jsrsasign.jws.JWS.sign('RS256', JSON.stringify(header), JSON.stringify(payload), process.env.PRIVATE_KEY)
//console.log("jwt token:")
//console.log(jwtToken)
const body = new URLSearchParams({
client_id: process.env.API_KEY,
client_secret: process.env.CLIENT_SECRET,
jwt_token: jwtToken
})
const response = await fetch('https://ims-na1.adobelogin.com/ims/exchange/jwt', {
method: 'POST',
options: { timeout: 0},
timeout: 0,
size: 0,
body: body
})//.catch(error => {
// console.log("an error happend in fetchg")
// console.log(error)
//})
const json = await response.json()
if ((response.status !== 200) && (response.status !== 201)) {
console.error(`Invalid response status ${ response.status }.`);
throw json;
}
console.log("access_token:")
console.log(json['access_token'])
return json['access_token']
}
async function makeApiCall (accessToken, url, method) {
console.log("start make api call")
const response = await fetch(url, {
'method': method,
'headers': {
'x-gw-ims-org-id': process.env.ORGANIZATION_ID,
'x-api-key': process.env.API_KEY,
'Authorization': `Bearer ${accessToken}`
}
})
console.log("finish make api call")
const json = await response.json()
return json
}
function getLink (obj, linkType) {
return obj['_links'][linkType].href
}
async function getExecution (executionUrl) {
console.log("start getExecution")
console.log("exectution_url:")
console.log(executionUrl)
const accessToken = await getAccessToken()
console.log("access-token:")
console.log(accessToken)
const execution = await makeApiCall(accessToken, executionUrl, 'GET')
console.log(execution)
console.log("aaaa")
const program = await makeApiCall(accessToken, new URL(getLink(execution, 'http://ns.adobe.com/adobecloud/rel/program'), executionUrl))
console.log(execution)
console.log("here")
execution.program = program
return execution
}
//app.use(bodyParser.json())
app.use(bodyParser.json({
verify: (req, res, buf, encoding) => {
const signature = req.header('x-adobe-signature')
if (signature) {
const hmac = crypto.createHmac('sha256', process.env.CLIENT_SECRET)
hmac.update(buf)
const digest = hmac.digest('base64')
if (signature !== digest) {
throw new Error('x-adobe-signature HMAC check failed')
}
} else if (!process.env.DEBUG && req.method === 'POST') {
throw new Error('x-adobe-signature required')
}
}
}))
app.use(timeout("30000"))
app.post('/webhook', (req, res) => {
req.setTimeout(120000, function(){
console.log('Request has timed out.');
res.send(408);
});
res.writeHead(200, { 'Content-Type': 'application/text' })
res.end('pong')
getExecution("https://cloudmanager.adobe.io/<somevalidurl>").then(execution => {
console.log(`Execution for ${execution.program.name} started`)
})
})
module.exports = app;
//const port = process.env.PORT || 3000
//app.listen(port, () =>
// console.log(`App is listening on port ${port}.`)
//)

Resources