Reuse TCP connection with node-fetch in node.js - node.js

I am using this function to call an external API
const fetch = require('node-fetch');
fetchdata= async function (result = {}) {
var start_time = new Date().getTime();
let response = await fetch('{API endpoint}', {
method: 'post',
body: JSON.stringify(result),
headers: { 'Content-Type': 'application/json' },
keepalive: true
});
console.log(response)
var time = { 'Respone time': + (new Date().getTime() - start_time) + 'ms' };
console.log(time)
return [response.json(), time];
}
The problem is that i am not sure that node.js is reusing the TCP connection to the API every time i use this function, eventhough i defined the keepalive property.
Reusing the TCP connection can significantly improve response time
Any suggestions will be welcomed.

As documented in https://github.com/node-fetch/node-fetch#custom-agent
const fetch = require('node-fetch');
const http = require('http');
const https = require('https');
const httpAgent = new http.Agent({ keepAlive: true });
const httpsAgent = new https.Agent({ keepAlive: true });
const agent = (_parsedURL) => _parsedURL.protocol == 'http:' ? httpAgent : httpsAgent;
const fetchdata = async function (result = {}) {
var start_time = new Date().getTime();
let response = await fetch('{API endpoint}', {
method: 'post',
body: JSON.stringify(result),
headers: { 'Content-Type': 'application/json' },
agent
});
console.log(response)
var time = { 'Respone time': + (new Date().getTime() - start_time) + 'ms' };
console.log(time)
return [response.json(), time];
}

Here's a wrapper around node-fetch based on their documentation:
import nodeFetch, { RequestInfo, RequestInit, Response } from "node-fetch";
import http from "http";
import https from "https";
const httpAgent = new http.Agent({
keepAlive: true
});
const httpsAgent = new https.Agent({
keepAlive: true
});
export const fetch = (url: RequestInfo, options: RequestInit = {}): Promise<Response> => {
return nodeFetch(url, {
agent: (parsedURL) => {
if (parsedURL.protocol === "http:") {
return httpAgent;
} else {
return httpsAgent;
}
},
...options
});
};

Keep-alive is not enabled for the default used agent and is not currently implemented into node-fetch directly, but you can easily specify a custom-agent where you enable the keep-alive option:
const keepAliveAgent = new http.Agent({
keepAlive: true
});
fetch('{API endpoint}', {
...
agent: keepAliveAgent
});

here is a wrapper for node-fetch to add a keepAlive option, based on Ilan Frumer's answer
// fetch: add option keepAlive with default true
const fetch = (function getFetchWithKeepAlive() {
const node_fetch = require('node-fetch');
const http = require('http');
const https = require('https');
const httpAgent = new http.Agent({ keepAlive: true });
const httpsAgent = new https.Agent({ keepAlive: true });
return async function (url, userOptions) {
const options = { keepAlive: true };
Object.assign(options, userOptions);
if (options.keepAlive == true)
options.agent = (parsedUrl => parsedUrl.protocol == 'http:' ? httpAgent : httpsAgent);
delete options.keepAlive;
return await node_fetch(url, options);
}
})();
const response = await fetch('https://github.com/');
const response = await fetch('https://github.com/', { keepAlive: false });

Related

AWS cloudwatch API canary statuscode comes as failed even the logged(logged in cloudwatch logs) statusCode is 200

I have a API canary setup for testing the availability of APIs. The statusCode of the API is tracked in cloudwatch logs. I am fetching files from S3. Those requests are coming with 200 statusCode as the same available in cloudwatch Logs. But in case of my API testing, in cloudwatch logs the statuscode prints as 200. But in Canary page it comes as failed.
Please find the attached canary page image
The first two request are S3 requests, others are my API requests.
Also I attached the cloudwatch logs screenshot
And my API canary script is,
var synthetics = require('Synthetics');
const log = require('SyntheticsLogger');
const https = require('https');
const http = require('http');
const syntheticsConfiguration = synthetics.getConfiguration();
const AWS = require('aws-sdk');
const s3 = new AWS.S3();
const apiCanaryBlueprint = async function () {
const verifyRequest = async function (res) {
return new Promise((resolve, reject) => {
log.info("status: " + res.statusCode + ' ' + res);
if (res.statusCode !== 200 && res.statusCode !== 201) {
log.info("Failed: " + res.statusCode + ' ' + res.statusMessage);
throw res.statusCode + ' ' + res.statusMessage;
}
let responseBody = '';
res.on('data', (d) => {
responseBody = d;
responseBody += d; // tried in this way as well
log.info("Response2: " + responseBody);
resolve();
});
res.on('end', () => {
resolve();
});
});
}
const getS3Certificate = async function (){
const certificate = {
Bucket : 'bucket',
Key : 'key',
};
const certificates3 = await s3.getObject(certificate).promise()
return certificates3.Body;
}
const getS3Key = async function(){
const key = {
Bucket : 'bucket',
Key : 'key',
};
const keys3 = await s3.getObject(key).promise()
return keys3.Body;
}
const S3Certificate = await getS3Certificate()
const S3Key = await getS3Key()
let projectId = Math.floor(Math.random()*1E16);
let applicationID = Math.floor(Math.random()*1E16);
projectId = projectId.toString().match(/.{1,4}/g).join('-');
let subscribedCount = Math.round(Math.random()*1E4);
let requestData = [
{
hostname: 'hostname.com',
method: 'get',
path: `/api/v1.0/project/123`,
port: 443,
protocol: 'https:',
key:S3Key,
cert:S3Certificate,
headers:{"header1":"a"}
},
{
hostname: 'hostname.com',
method: 'POST',
path: `/api/v1.0/project/${projectId}`,
port: 443,
key:S3Key,
cert:S3Certificate,
headers:{"header1":"a"},
body:JSON.stringify({ "isActive": true, "licenseItems": [...licenseItemsDetails] })
},
{
hostname: 'hostname.com',
method: 'PUT',
path: '/api/v1.0/project/canary/licenses',
port: 443,
key:S3Key,
cert:S3Certificate,
headers:{"header1":"a"},
body:JSON.stringify({"licenseItems": [...licenseItemsDetails]})
},
{
hostname: 'hostname.com',
method: 'DELETE',
path: '/api/v1.0/project/canary/canaryapp_001',
port: 443,
key:S3Key,
cert:S3Certificate,
headers:{"header1":"a"}
},
{
hostname: 'hostname.com',
method: 'PUT',
path: '/api/v1.0/project/canary/state',
port: 443,
key:S3Key,
cert:S3Certificate,
headers:{"header1":"a"},
body:JSON.stringify({"isActive": true})
},
{
hostname: 'hostname.com',
method: 'DELETE',
path: `/api/v1.0/project/${projectId}`,
port: 443,
key:S3key,
cert:S3Cert,
headers:{"header1":"a"}
},
]
requestData.forEach(async (request) => {
request['headers']['User-Agent'] = [synthetics.getCanaryUserAgentString(), request['headers']['User-Agent']].join(' ');
// await verifyRequest(request);
let stepConfig = {
includeRequestHeaders: true,
includeResponseHeaders: true,
restrictedHeaders: ['X-Amz-Security-Token', 'Authorization'],
includeRequestBody: true,
includeResponseBody: true
};
await synthetics.executeHttpStep('Verify LMS APIs', request, verifyRequest, stepConfig);
})
};
exports.handler = async () => {
return await apiCanaryBlueprint();
};
In the verifyRequest method, the request data is sent in an array to the method.
This is causing a problem. Even though I put await on every call, I am not getting expected data(responseBody becomes empty string) on res.on('end' => {callback})
So I changed my code with separate request and separate await function, this fixed the problem.
let requestOptionsStep8 = {
hostname: 'hostname.com',
method: 'DELETE',
path: `/api/v1.0/project/projectId`,
port: 443,
key:key,
cert:cert,
headers:{'Content-Type':'application/json','ALE-LMS-Operation-Id':'a','Accept':'*/*'}
}
let requestOptionsStep9 = {
hostname: 'hostname.com',
method: 'PATCH',
path: `/api/v1.0/project/canary3/canary1`,
port: 443,
key:key,
cert:cert,
headers:{'Content-Type':'application/json','ALE-LMS-Operation-Id':'a','Accept':'*/*'},
body:JSON.stringify({JSON body})
}
requestOptionsStep['headers']['User-Agent'] =
[synthetics.getCanaryUserAgentString(), requestOptionsStep['headers']['User-Agent']].join(' ');
requestOptionsStep1['headers']['User-Agent'] =
[synthetics.getCanaryUserAgentString(), requestOptionsStep1['headers']['User-Agent']].join(' ');
let stepConfig1 = {
includeRequestHeaders: true,
includeResponseHeaders: true,
includeRequestBody: true,
includeResponseBody: true,
continueOnHttpStepFailure: true
};
await synthetics.executeHttpStep('Verify hostname.com', requestOptionsStep, validateSuccessful, stepConfig1);
await synthetics.executeHttpStep('Verify hostname.com', requestOptionsStep1, validateSuccessful, stepConfig1);

Nodejs - Axios - Timeouts lead to memory leak

I'm running an app which basically:
Receives a request through Express.js
Send multiples requests to various endpoints
Once those endpoints responses, we compute a response and send it to the client.
I'm using Axios Instances per endpoints to send the requests.
const axios = require('axios');
const Agent = require('agentkeepalive');
const { HttpsAgent } = Agent;
const httpKeepAliveAgent = new Agent({
maxSockets: 100,
maxFreeSockets: 10,
timeout: 60000, // active socket keepalive for 60 seconds
freeSocketTimeout: 30000, // free socket keepalive for 30 seconds
});
const httpsKeepAliveAgent = new HttpsAgent({
maxSockets: 100,
maxFreeSockets: 10,
timeout: 60000, // active socket keepalive for 60 seconds
freeSocketTimeout: 30000, // free socket keepalive for 30 seconds
});
const createAxiosInstance = () => axios.create({
httpAgent: httpKeepAliveAgent,
httpsAgent: httpsKeepAliveAgent,
maxRedirects: 10,
});
I send requests to third party endpoint using the following
const fakeServer = require('../test/fake-server');
const logger = require('../utils/logger');
const { LOG_VERBOSE } = process.env;
// let promiseCount = 0;
module.exports = async (axiosInstance, ssp, payload, endpoint, method, timeout, headers) => {
const cmd = process.env.NODE_ENV === 'test' ? fakeServer : axiosInstance;
const start = Date.now();
const config = {
ssp,
url: endpoint,
method,
timeout,
headers: {
'Content-Type': 'application/json; charset=utf-8;',
Accept: 'application/json',
},
data: payload,
};
if (headers !== undefined && typeof headers === 'object') {
// eslint-disable-next-line no-return-assign
Object.keys(headers).forEach((key) => config.headers[key] = headers[key]);
}
try {
const response = await cmd(config);
return {
ssp,
uri: config.url,
requestbody: payload,
requestheaders: config.headers,
responsebody: response.data,
status: response.status,
responsetimemillis: Date.now() - start,
};
} catch (error) {
if (LOG_VERBOSE === 'true') logger.error(`Error on ${ssp} call: ${error.message}`);
let responsebody;
let status;
if (error.response === undefined) {
responsebody = error.code;
status = error.code;
} else {
responsebody = error.response.data ? error.response.data : error.message;
status = error.response.status;
}
return {
ssp,
uri: config.url,
requestbody: payload,
requestheaders: config.header,
responsebody,
status,
responsetimemillis: Date.now() - start,
};
}
};
The issues is that when I get timeouts or 400 error, there might happen some memory leaks, I can't get a successful linked connection, and so at the end the app crashes...
Any ideas?
https://github.com/axios/axios/issues/2783
maxRedirects may be causing this issue, try setting lower redirects or 0, as the resolved issue suggests.

How to get specific chunks from stream node js

I'm trying to make a server that are used like a "CDN proxy".
We have
S1: main server that has all media
S2 CDN proxy
client
The aim is:
obtain a stream from server1 (S1)
(I'm using this follow link as placeholder, the effective link could be a temp link)
axios.get(link, { responseType: "stream", adapter: httpAdapter })
.then((axiosResponse: any) => { ... })
since I have a stream, I don't need to proxy the entire media to the client, but just a chunk (specified from the client)
I don't know how to retrieve a specific chunk without download all chunks up to the desired chunk.
This is a scratch:
import express, { Request, Response } from 'express';
import expressAsyncHandler from 'express-async-handler';
import * as http from 'http';
const axios = require("axios");
const httpAdapter = require("axios/lib/adapters/http");
const app = express();
const HTTP_PORT = 3000;
var server = http.createServer(app);
const link = 'https://images.all-free-download.com/footage_preview/mp4/city_panorama_6891675.mp4';
app.get('/video.mp4', expressAsyncHandler(async (req: Request, res: Response) => {
axios.get(link, { responseType: "stream", adapter: httpAdapter })
.then((axiosResponse: any) => {
let stream = axiosResponse?.data;
const fileSize = axiosResponse["headers"]["content-length"];
const range = req.headers.range
if (range) {
const parts = range.replace(/bytes=/, "").split("-")
const start = parseInt(parts[0], 10)
const end = parts[1]
? parseInt(parts[1], 10)
: fileSize - 1
const chunksize = (end - start) + 1
/*******************************/
const streamChunk = /* GET CHUNCK FROM STREAM WITHOUT OVERHEAD */ null;
/*******************************/
const head = {
'Content-Range': `bytes ${start}-${end}/${fileSize}`,
'Accept-Ranges': 'bytes',
'Content-Length': chunksize,
'Content-Type': 'video/mp4',
}
res.writeHead(206, head);
streamChunk.pipe(res);
} else {
const head = {
'Content-Length': fileSize,
'Content-Type': 'video/mp4',
}
res.writeHead(200, head)
stream.pipe(res)
}
})
}));
server.listen(HTTP_PORT, () => {
console.log("Running on port: " + HTTP_PORT);
});
I hope someone can help me.
Thanks in advice :)
UPDATE
Follow code works on VLC
import express, { Request, Response } from 'express';
import expressAsyncHandler from 'express-async-handler';
import * as http from 'http';
import * as https from 'https';
const axios = require("axios");
const httpAdapter = require("axios/lib/adapters/http");
const app = express();
const HTTP_PORT = 3000;
var server = http.createServer(app);
/************************************************/
// PREVENT EXCEPTION CLOSURE
process.on('uncaughtException', function (err) {
console.error(err);
console.log("Node NOT Exiting...");
});
/************************************************/
const link = 'https://samplelib.com/lib/preview/mp4/sample-30s.mp4';
app.get('/video.mp4', expressAsyncHandler(async (req: Request, res: Response) => {
if (req.socket.destroyed) {
return;
}
delete req.headers.referer;
let head;
let status;
const range = req.headers.range
const axiosResponseHead = await axios.head(link)
const fileSize = axiosResponseHead["headers"]["content-length"];
const agent = new https.Agent({
rejectUnauthorized: false
});
console.log(range)
if (range) {
req.headers.host = new URL(link).hostname;
const parts = range?.replace(/bytes=/, "")?.split("-")
const start = parseInt(parts[0], 10)
const end = parts[1]
? parseInt(parts[1], 10)
: fileSize - 1
const chunksize = (end - start) + 1
head = {
'Content-Range': `bytes ${start}-${end}/${fileSize}`,
'range': `bytes=${start}-${end}`,
'Accept-Ranges': 'bytes',
'Content-Length': chunksize,
'Content-Type': 'video/mp4',
}
status = 206;
req.headers.range = head.range
} else {
head = {
'Content-Length': fileSize,
'Content-Type': 'video/mp4',
}
status = 200;
}
console.log(req.headers)
console.log(head)
console.log("==================================")
let axiosResponse: any
let stream: any;
res.on('close', function () {
stream?.destroy();
});
let instance = axios.create();
axiosResponse = await instance.get(link, {
responseType: "stream", adapter: httpAdapter, headers: req.headers, httpsAgent: agent
})
stream = axiosResponse?.data;
res.writeHead(status, head)
stream.pipe(res, { end: true });
}));
server.listen(HTTP_PORT, () => {
console.log("Running on port: " + HTTP_PORT);
});
function sleep(ms: number) {
return new Promise(resolve => setTimeout(resolve, ms));
}

Graphql Connect Error - Only getting Query or Subscription or both

We are encountering an error in the setup of our application to the graphql connection. Our main need is Subscriptions, currently our code handles these fine. But our queries respond with "Leads not found in query_root"
When we modify the positioning of our connection code, the query now will work but subscriptions stop connecting.
const scheme = proto => {
return window.location.protocol === 'https:' ? `${proto}s` : proto;
};
const wsurl = `${scheme('ws')}://${HASURA_GRAPHQL_ENGINE_HOSTNAME}/v1/graphql`;
const httpurl = `${scheme('http')}://${HASURA_GRAPHQL_ENGINE_HOSTNAME}/v1/graphql`;
// Create a WebSocket link:
const wsLink = new WebSocketLink({
uri: wsurl,
options: {
reconnect: true,
connectionParams: {
headers: {
'x-hasura-access-key': `${HASURA_ACCESS_KEY}`
}
}
}
});
const httpLink = new HttpLink({
uri: httpurl,
options: {
connectionParams: {
headers: {
'x-hasura-access-key': `${HASURA_ACCESS_KEY}`
}
}
}
});
const link = split(
// split based on operation type
({ query }) => {
const { kind, operation } = getMainDefinition(query);
return kind === 'OperationDefinition' && operation === 'subscription';
},
wsLink,
httpLink
);
const client = new ApolloClient({
link,
cache: new InMemoryCache()
});
export default client;
In the link const, if we put wsLink below httpLink then wsLink stops and httpLink works.

node-fetch timeout issue/ express timeout/ lambda timeout or sth else?

below code works fine when running locally, but "getAccessToken ()" does not work as expected when running inside an aws lambda function, when doing "POST" to /webhook endpoint. I am using node 8.10, "aws-serverless-express": "^3.3.6", "express": "^4.16.4" and "node-fetch": "^2.5.0", basically it print the correct jwt token but node-fetch does not return anything, sample log.
START RequestId: c8efba59-1869-4eaa-b9d8-aa15a7507d52 Version: $LATEST
2019-05-27T19:55:32.328Z c8efba59-1869-4eaa-b9d8-aa15a7507d52 start getExecution
2019-05-27T19:55:32.328Z c8efba59-1869-4eaa-b9d8-aa15a7507d52 exectution_url:
2019-05-27T19:55:32.328Z c8efba59-1869-4eaa-b9d8-aa15a7507d52 https://cloudmanager.adobe.io/somevalidurl
2019-05-27T19:55:32.328Z c8efba59-1869-4eaa-b9d8-aa15a7507d52 start getAccessToken
END RequestId: c8efba59-1869-4eaa-b9d8-aa15a7507d52
REPORT RequestId: c8efba59-1869-4eaa-b9d8-aa15a7507d52 Duration: 6657.00 ms Billed Duration: 6700 ms Memory Size: 128 MB Max Memory Used: 37 MB
I made sure lambda timeout is 30 seconds, tried to disable "node-fetch" timeout by setting it to 0 and , used a middleware for all the routes "app.use(timeout("30000"))" also for that specific webhook request timeout. (I receive the 200 pong response immediately but getexectuion async function does not work properly)
const express = require('express')
const bodyParser = require('body-parser')
const crypto = require('crypto')
const jsrsasign = require('jsrsasign')
const fetch = require('node-fetch')
const timeout = require('connect-timeout')
const URL = require('url').URL
const URLSearchParams = require('url').URLSearchParams
//require('dotenv').config()
const app = express()
async function getAccessToken () {
console.log("start getAccessToken")
const EXPIRATION = 60 * 60 // 1 hour
const header = {
'alg': 'RS256',
'typ': 'JWT'
}
const payload = {
'exp': Math.round(new Date().getTime() / 1000) + EXPIRATION,
'iss': process.env.ORGANIZATION_ID,
'sub': process.env.TECHNICAL_ACCOUNT_ID,
'aud': `https://ims-na1.adobelogin.com/c/${process.env.API_KEY}`,
'https://ims-na1.adobelogin.com/s/ent_cloudmgr_sdk': true
}
const jwtToken = jsrsasign.jws.JWS.sign('RS256', JSON.stringify(header), JSON.stringify(payload), process.env.PRIVATE_KEY)
//console.log("jwt token:")
//console.log(jwtToken)
const body = new URLSearchParams({
client_id: process.env.API_KEY,
client_secret: process.env.CLIENT_SECRET,
jwt_token: jwtToken
})
const response = await fetch('https://ims-na1.adobelogin.com/ims/exchange/jwt', {
method: 'POST',
options: { timeout: 0},
timeout: 0,
size: 0,
body: body
})//.catch(error => {
// console.log("an error happend in fetchg")
// console.log(error)
//})
const json = await response.json()
if ((response.status !== 200) && (response.status !== 201)) {
console.error(`Invalid response status ${ response.status }.`);
throw json;
}
console.log("access_token:")
console.log(json['access_token'])
return json['access_token']
}
async function makeApiCall (accessToken, url, method) {
console.log("start make api call")
const response = await fetch(url, {
'method': method,
'headers': {
'x-gw-ims-org-id': process.env.ORGANIZATION_ID,
'x-api-key': process.env.API_KEY,
'Authorization': `Bearer ${accessToken}`
}
})
console.log("finish make api call")
const json = await response.json()
return json
}
function getLink (obj, linkType) {
return obj['_links'][linkType].href
}
async function getExecution (executionUrl) {
console.log("start getExecution")
console.log("exectution_url:")
console.log(executionUrl)
const accessToken = await getAccessToken()
console.log("access-token:")
console.log(accessToken)
const execution = await makeApiCall(accessToken, executionUrl, 'GET')
console.log(execution)
console.log("aaaa")
const program = await makeApiCall(accessToken, new URL(getLink(execution, 'http://ns.adobe.com/adobecloud/rel/program'), executionUrl))
console.log(execution)
console.log("here")
execution.program = program
return execution
}
//app.use(bodyParser.json())
app.use(bodyParser.json({
verify: (req, res, buf, encoding) => {
const signature = req.header('x-adobe-signature')
if (signature) {
const hmac = crypto.createHmac('sha256', process.env.CLIENT_SECRET)
hmac.update(buf)
const digest = hmac.digest('base64')
if (signature !== digest) {
throw new Error('x-adobe-signature HMAC check failed')
}
} else if (!process.env.DEBUG && req.method === 'POST') {
throw new Error('x-adobe-signature required')
}
}
}))
app.use(timeout("30000"))
app.post('/webhook', (req, res) => {
req.setTimeout(120000, function(){
console.log('Request has timed out.');
res.send(408);
});
res.writeHead(200, { 'Content-Type': 'application/text' })
res.end('pong')
getExecution("https://cloudmanager.adobe.io/<somevalidurl>").then(execution => {
console.log(`Execution for ${execution.program.name} started`)
})
})
module.exports = app;
//const port = process.env.PORT || 3000
//app.listen(port, () =>
// console.log(`App is listening on port ${port}.`)
//)

Resources