Node.js - Need to perform around 100 async API requests

I am new to Node.js and I am trying to perform around 100 API requests using the axios npm package in a single run. What would be the best code logic to capture all responses, with good performance and error tracking?
I need to capture all responses in a single JSON file, so I decided to use createWriteStream('filename.json')
to avoid memory issues.
I tried something like this:
const axios = require('axios');
const fs = require('fs');
const config = require('./config/secret.json');

app.get('/json', (req, res) => {
  const linkArr = ['https://apirequest1.com', 'https://apirequest2.com', 'https://apirequest3.com', 'https://apirequest4.com', '...'];
  const wipArr = [];
  for (let getAPI of linkArr) {
    axios({
      method: 'get',
      url: getAPI,
      auth: { username: config.username, password: config.secret }
    })
      .then(function (response) {
        const writeStream = fs.createWriteStream('wip.json');
        writeStream.write(JSON.stringify(response.data));
      })
      .catch(function (error) {
        console.log(error);
      });
  }
  res.send('successfully saved all response');
});
I want to capture all the API responses in a single run, save them in an array once the requests complete, and then write all responses to a JSON file.
Thanks in advance!

The first issue you have is that you create the write stream on every response, which overwrites the file's contents each time a promise resolves. Remove this line:
const writeStream = fs.createWriteStream('wip.json');
You will then have something like this:
const axios = require('axios');
const fs = require('fs');
const config = require('./config/secret.json');
const writeStream = fs.createWriteStream('wip.json');

app.get('/json', (req, res) => {
  const linkArr = ['https://apirequest1.com', 'https://apirequest2.com', 'https://apirequest3.com', 'https://apirequest4.com', '...'];
  const wipArr = [];
  for (let getAPI of linkArr) {
    axios({
      method: 'get',
      url: getAPI,
      auth: { username: config.username, password: config.secret }
    })
      .then(function (response) {
        // const writeStream = fs.createWriteStream('wip.json'); // removed: it would overwrite the file for each response
        writeStream.write(JSON.stringify(response.data));
      })
      .catch(function (error) {
        console.log(error);
      });
  }
  res.send('successfully saved all response');
});
EDIT: To wait for all the requests, you can try something like this:
app.get('/json', async (req, res) => {
  let resp = null;
  const writeStream = fs.createWriteStream('wip.json');
  const linkArr = ['https://apirequest1.com', 'https://apirequest2.com', 'https://apirequest3.com', 'https://apirequest4.com', '...'];
  const promises = [];
  for (let getAPI of linkArr) {
    promises.push(makeCall(getAPI));
    resp = await Promise.all(promises); // resp is an array of responses; note that awaiting inside the loop makes each iteration wait before the next request is queued
    // for (let i = 0; i < resp.length; i++) {
    //   writeStream.write(JSON.stringify(resp[i], null, 4)); // to format the JSON string
    // }
  }
  for (let i = 0; i < resp.length; i++) {
    writeStream.write(JSON.stringify(resp[i], null, 4)); // to format the JSON string
  }
  res.send('successfully saved all response');
});
function makeCall(getAPI) {
  return axios({
    method: 'get',
    url: getAPI,
    auth: { username: config.username, password: config.secret }
  })
    .then(function (response) {
      return response.data;
    });
}
I have not tested it, but it should be something along those lines. This will run all the requests.
To format the JSON strings you can use
JSON.stringify(resp[i], null, 4).
Have a look at https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify
Edit: The problem was that writeStream.write(JSON.stringify(resp[i], null, 4)); was inside the loop, so I moved it outside. The code below is still untested, but it should work for you:
app.get('/json', async (req, res) => {
  const writeStream = fs.createWriteStream('wip.json');
  const linkArr = ['https://apirequest1.com', 'https://apirequest2.com', 'https://apirequest3.com', 'https://apirequest4.com', '...'];
  const promises = [];
  for (let getAPI of linkArr) {
    promises.push(makeCall(getAPI));
  }
  const resp = await Promise.all(promises); // resp is an array of responses
  for (let i = 0; i < resp.length; i++) {
    writeStream.write(JSON.stringify(resp[i], null, 4)); // to format the JSON string
  }
  res.send('successfully saved all response');
});

function makeCall(getAPI) {
  return axios({
    method: 'get',
    url: getAPI,
    auth: { username: config.username, password: config.secret }
  })
    .then(function (response) {
      return response.data; // resolve with just the payload; the full axios response object contains circular references that JSON.stringify cannot handle
    });
}
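On the error-tracking part of the question: Promise.all rejects as soon as any one of the ~100 requests fails, so you lose all the other responses. A minimal sketch of an alternative, assuming Node 12.9+ for Promise.allSettled and the same linkArr/config shape as above, that records each URL's outcome and writes the file once as a single valid JSON array:

const axios = require('axios');
const fs = require('fs');
const config = require('./config/secret.json');

async function fetchAll(linkArr) {
  // Fire all requests in parallel, but capture each outcome instead of failing fast.
  const results = await Promise.allSettled(
    linkArr.map((url) =>
      axios.get(url, { auth: { username: config.username, password: config.secret } })
    )
  );
  // Pair each outcome with its URL so failures are traceable.
  return results.map((r, i) => ({
    url: linkArr[i],
    ok: r.status === 'fulfilled',
    data: r.status === 'fulfilled' ? r.value.data : String(r.reason),
  }));
}

fetchAll(['https://apirequest1.com', 'https://apirequest2.com'])
  .then((all) => fs.promises.writeFile('wip.json', JSON.stringify(all, null, 4)));

Note that writing each stringified response back-to-back into a stream, as in the answer above, does not produce a valid JSON document; serializing one array (or writing the separating brackets and commas yourself) keeps wip.json parseable.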

Related

Why does Node.js run out of heap memory when creating an Excel file with big data?

I am creating an Excel file on the Node.js end and returning base64 data to ReactJS to download the file. On the Node.js end, I use Promise.all, fetch data from a server in chunks, and append the data into the Excel sheet with
worksheet.addRows(data);
For around 20-30k rows it works fine, but for around 100k rows it gives me a "heap out of memory" error on the Node.js end.
I have also increased the memory allocated to Node.js, but I get the same error:
node --max_old_space_size=5000 app.js
What am I doing wrong? Any suggestions?
Node.js:
const axios = require('axios');
var excel = require("exceljs");
const workbook = new excel.Workbook();
const worksheet = workbook.addWorksheet("My Sheet");
worksheet.columns = [
  { header: "TicketId", key: "ticketId" },
  { header: "Email", key: 'user_email' },
  { header: "User", key: 'user_name' },
  { header: "Subject", key: "subject" },
  ... // many more headers
];

exports.getTicketData = async (req, res, next) => {
  res.connection.setTimeout(0);
  const { body } = req;
  const token = body.token;
  const organization_id = body.organization_id;
  const server = body.server;
  const sideFilter = body.sideFilter;
  let baseurl = 'url for server end to fetch data';
  if (baseurl) {
    let data = new Array();
    let limit = 3000;
    const promises = [];
    try {
      let count = await getCount(token, limit, organization_id, baseurl, sideFilter);
      for (var i = 1; i <= count; i++) {
        promises.push(getData(i, limit, organization_id, token, baseurl, sideFilter));
      }
      await Promise.all(promises).then((results) => {
      }).catch((e) => {
        throw e;
      });
      var base64File = await writeExcelAndUpload(workbook);
      return res.status(200).json({ file: base64File });
    } catch (err) {
      return res.status(400).json({ type: 'error', msg: 'File not generated please contact support staff' });
    }
  } else {
    return res.status(400).json({ type: 'error', msg: 'please define server name' });
  }
};

let getData = (page, limit, organization_id, token, baseurl, sideFilter) => {
  return new Promise((resolve, reject) => {
    axios.post(baseurl + `/v2/get-export`, {
      page: page,
      organization_id: organization_id,
      per_page: limit,
      filter: "",
      sorted: "",
      ...sideFilter
    }, { headers: { "Authorization": `Bearer ${token}` } }).then(function (response) {
      let dataTemp = response.data.data.data.map((t, i) => {
        return {
          ...t,
          name: t.name,
          ... // many more columns, around 70
        }
      });
      worksheet.addRows(dataTemp);
      resolve(true);
    }).catch(function (error) {
      reject(error);
    });
  });
}

let getCount = (token, limit, organization_id, baseurl, sideFilter) => {
  // run an API and return the count against the limit
}

let writeExcelAndUpload = async (workbook) => {
  const fileBuffer = await workbook.xlsx.writeBuffer();
  let base64File = Buffer.from(fileBuffer).toString('base64');
  base64File = 'data:application/vnd.openxmlformats-officedocument.spreadsheetml.sheet;base64,' + base64File;
  return base64File;
}
Client side (ReactJS):
exportLink = () => {
  postData = {
    ...
  };
  return axios.post(`${baseurl}/api/ticketing/get-ticket`, postData).then(function (response) {
    const downloadLink = document.createElement("a");
    const fileName = "export.xlsx";
    downloadLink.href = response.data.file;
    downloadLink.download = fileName;
    downloadLink.click();
  }).catch(function (error) {
    throw error;
  });
}
Well, it is somewhat expected that you may run out of heap memory when working with on the order of 100k entries.
I would suggest using pagination: instead of fetching e.g. 100k entries at once, fetch 1k entries, do what you need with them, then fetch the next 1k, and repeat until you have processed all entries.
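To make that concrete, here is a minimal sketch of the pagination idea, assuming the same worksheet, baseurl, token, and organization_id as in the question (the request shape simply mirrors the question's getData):

const axios = require('axios');

// Fetch and append one page at a time instead of firing all page
// requests at once with Promise.all.
async function exportInBatches(totalPages, limit, worksheet, { baseurl, token, organization_id }) {
  for (let page = 1; page <= totalPages; page++) {
    const response = await axios.post(baseurl + '/v2/get-export', {
      page, organization_id, per_page: limit, filter: "", sorted: "",
    }, { headers: { Authorization: `Bearer ${token}` } });
    // Only this batch's rows are alive in memory; the previous batch
    // has gone out of scope and can be garbage-collected.
    worksheet.addRows(response.data.data.data);
  }
}

Separately, exceljs ships a streaming writer (excel.stream.xlsx.WorkbookWriter) that flushes committed rows to disk instead of keeping the whole workbook in memory, which may also be worth a look at the 100k-row scale.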

Node set variable from text file

I am working on OAuth2 for an API client, and I am having trouble getting the promise back in time to continue with the other calls to the API. My tokens expire every 30 minutes and my Node script runs every 10 minutes. I thought I could write the newest token to a text file each time the script runs and read it at the beginning, so I would always have a valid token to use for auth. However, the variable doesn't get set in time to make the calls, so the header has undefined next to the Bearer. I can't figure out how to get the variable set before the call goes off.
Here is the script I am using to make the calls:
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const Axios = require("axios");
const moment = require("moment");
const config = require("../config/default");
const oauth = require("axios-oauth-client");
const fs = require('fs');
let accessToken;
var token = fs.readFile('token.txt', 'utf8', function(err, data) {
if (err) throw err;
return data;
});
const getOwnerCredentials = oauth.client(Axios.create(), {
url: 'url',
grant_type: 'password',
client_id: 'username',
client_secret: 'secret',
username: 'username',
password: 'password',
scope: 'scope'
});
const tokenCall = async () => {
const result = await getOwnerCredentials();
return result
}
const getToken = async () => {
const accessToken = await tokenCall();
const fs = require('fs')
fs.writeFile('/root/qt-cwsedona/token.txt', '', function(){console.log('done')})
fs.writeFile('token.txt', accessToken.access_token, function (err) {
if (err) return console.log(err);
});
};
getToken();
class Sedona {
constructor() {
this.baseUrl = config.sedonaUrl;
this.client = Axios.default.create({
baseURL: this.baseUrl,
auth: {
Authorization: 'Bearer ' + token
},
});
console.log(this.client);
}
getCustomerBillId(customer_id) {
return this.client.get('/CustomerBill/' + customer_id).then(response => {
let result = parseInt(response['data'][0]['CustomerBillId']);
if (isNaN(result)) {
return "";
}
else {
return result.toString();
}
}).catch(error => {
throw error;
});
}
Then I use this code to actually kick off these functions:
const Sedona = require("./services/SedonaService");
Promise.all([
  sedona.getCustomerBillId(customerId)
]).then(sedona_results => {
This is probably not the right way to do it, but I was able to set the default header for axios in the async function, and it worked:
const getToken = async () => {
  const accessToken = await tokenCall();
  const token = accessToken.access_token;
  Axios.defaults.headers.common['Authorization'] = 'Bearer ' + accessToken.access_token;
};
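Another way to avoid the race entirely is to make the setup itself asynchronous, so the client is only constructed after the token exists. A rough sketch along those lines, reusing the tokenCall() helper from the question (note that axios's auth option is for HTTP Basic auth; a bearer token belongs in headers):

const Axios = require("axios");

// Build the API client only after a fresh token is available,
// instead of reading token.txt synchronously at module load.
async function createSedonaClient(baseUrl) {
  const accessToken = await tokenCall(); // same helper as in the question
  return Axios.create({
    baseURL: baseUrl,
    headers: { Authorization: 'Bearer ' + accessToken.access_token },
  });
}

// Usage: every caller awaits the client before making requests.
createSedonaClient(config.sedonaUrl)
  .then(client => client.get('/CustomerBill/' + customerId))
  .then(response => console.log(response.data));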

How to concat chunks of incoming binary into a video (webm) file in Node.js?

I am trying to upload chunks of base64 to a Node.js server and save those chunks into one file:
let chunks = [];

app.post('/api', (req, res) => {
  let { blob } = req.body;
  // converting chunks of base64 to a buffer
  chunks.push(Buffer.from(blob, 'base64'));
  res.json({ gotit: true });
});

app.post('/finish', (req, res) => {
  let buf = Buffer.concat(chunks);
  fs.writeFile('finalvideo.webm', buf, (err) => {
    console.log('Ahh....', err);
  });
  console.log('SAVED');
  res.json({ save: true });
});
The problem with the above code is that the video is not playable, and I don't know why. Am I really doing something wrong? I've also tried writable streams, and that is not working either.
UPDATE - I
Instead of sending blobs, I've implemented sending binary, but I am facing a problem: TypeError: First argument must be a string, Buffer, ArrayBuffer, Array, or array-like object.
client.js
postBlob = async blob => {
  let arrayBuffer = await new Response(blob).arrayBuffer();
  let binary = new Uint8Array(arrayBuffer);
  console.log(binary); // logging the typed Uint8Array
  axios.post('/api', { binary })
    .then(res => {
      console.log(res);
    });
};
server.js
let chunks = [];

app.post('/api', (req, res) => {
  let { binary } = req.body;
  let chunkBuff = Buffer.from(binary); // this line throws the error
  chunks.push(chunkBuff);
  console.log(chunkBuff);
  res.json({ gotit: true });
});

// Somehow combine those chunks into one file
app.post('/finish', (req, res) => {
  console.log('Combining the files', chunks.length);
  let buf = Buffer.concat(chunks);
  console.log(buf); // empty buffer
  fs.writeFile('save.webm', buf, (err) => {
    console.log('Ahh....', err);
  });
  res.json({ save: true });
});
UPDATE - II
I am able to receive the binary chunks and append them to a stream, but in the final video only the first chunk plays; I don't know what happened to the other chunks, and the video ends.
Code:
const writeMyStream = fs.createWriteStream(__dirname + '/APPENDED.webm', { flags: 'a', encoding: null });

app.post('/api', (req, res) => {
  let { binary } = req.body;
  let chunkBuff = Buffer.from(new Uint8Array(binary));
  writeMyStream.write(chunkBuff);
  res.json({ gotit: true });
});
UPDATE - III
My client code. Note: I've commented out the other ways I tried to upload blobs:
customRecordStream = stream => {
  let recorder = new MediaStreamRecorder(stream);
  recorder.mimeType = 'video/webm;codecs=vp9';
  recorder.ondataavailable = this.postBlob;
  recorder.start(INT_REC);
};

postBlob = async blob => {
  let arrayBuffer = await new Response(blob).arrayBuffer();
  let binary = new Uint8Array(arrayBuffer);
  axios.post('/api', { binary })
    .then(res => {
      console.log(res);
    });
  // Other attempts, kept for reference:
  // let binaryUi8 = new Uint8Array(arrayBuffer);
  // let binArr = Array.from(binaryUi8);
  // console.log(blob);
  // console.log(binArr);
  // let formData = new FormData();
  // formData.append('fname', 'test.webm');
  // formData.append("file", blob);
  // axios({
  //   method: 'post',
  //   url: '/api',
  //   data: formData,
  //   config: { headers: { 'Content-Type': 'multipart/form-data' } }
  // }).then(res => {
  //   console.log(res, 'FROM SERVER');
  // });
  // this.blobToDataURL(blob, (blobURL) => {
  //   axios.post('/api', { blob: blobURL })
  //     .then(res => {
  //       console.log(res);
  //     });
  // });
};
I was able to get this working by converting to base64 encoding on the front end with the FileReader API. On the back end, I create a new Buffer from each data chunk sent and write it to a file stream. Some key things with my code sample:
- I'm using fetch because I didn't want to pull in axios.
- When using fetch, you have to make sure you use bodyParser on the back end.
- I'm not sure how much data you're collecting in your chunks (i.e. the duration value passed to the start method on the MediaRecorder object), but you'll want to make sure your back end can handle the size of the incoming data chunks. I set mine really high, to 50MB, but this may not be necessary.
- I never close the write stream explicitly... you could potentially do this in your /finish route. Otherwise, the autoClose option of createWriteStream defaults to true, so the Node process will close it automatically.
Full working example below:
Front End:
const mediaSource = new MediaSource();
mediaSource.addEventListener('sourceopen', handleSourceOpen, false);
let mediaRecorder;
let sourceBuffer;

function customRecordStream(stream) {
  // should actually check whether the given mimeType is supported by the browser here
  let options = { mimeType: 'video/webm;codecs=vp9' };
  mediaRecorder = new MediaRecorder(stream, options);
  mediaRecorder.ondataavailable = postBlob;
  mediaRecorder.start(INT_REC);
}

function postBlob(event) {
  if (event.data && event.data.size > 0) {
    sendBlobAsBase64(event.data);
  }
}

function handleSourceOpen(event) {
  sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vp8"');
}

function sendBlobAsBase64(blob) {
  const reader = new FileReader();
  reader.addEventListener('load', () => {
    const dataUrl = reader.result;
    const base64EncodedData = dataUrl.split(',')[1]; // strip the "data:...;base64," prefix
    console.log(base64EncodedData);
    sendDataToBackend(base64EncodedData);
  });
  reader.readAsDataURL(blob);
}

function sendDataToBackend(base64EncodedData) {
  const body = JSON.stringify({
    data: base64EncodedData
  });
  fetch('/api', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
    },
    body
  }).then(res => {
    return res.json();
  }).then(json => console.log(json));
}
Back End:
const fs = require('fs');
const path = require('path');
const express = require('express');
const bodyParser = require('body-parser');
const app = express();
const server = require('http').createServer(app);

app.use(bodyParser.urlencoded({ extended: true }));
app.use(bodyParser.json({ limit: "50MB", type: 'application/json' }));

app.post('/api', (req, res) => {
  try {
    const { data } = req.body;
    const dataBuffer = Buffer.from(data, 'base64'); // Buffer.from avoids the deprecated new Buffer()
    const fileStream = fs.createWriteStream('finalvideo.webm', { flags: 'a' });
    fileStream.write(dataBuffer);
    console.log(dataBuffer);
    return res.json({ gotit: true });
  } catch (error) {
    console.log(error);
    return res.json({ gotit: false });
  }
});
Inspired by @willascend's answer:
Back-end side:
app.use(express.raw());

app.post('/video-chunck', (req, res) => {
  fs.createWriteStream('myvideo.webm', { flags: 'a' }).write(req.body);
  res.sendStatus(200);
});
Front-end side:
mediaRecorder.ondataavailable = event => {
  if (event.data && event.data.size > 0) {
    fetch(this.serverUrl + '/video-chunck', {
      method: 'POST',
      headers: { 'Content-Type': 'application/octet-stream' },
      body: event.data
    });
  }
};
My express version is 4.17.1
I faced the same problem today. As a solution on the back end, I used fs.appendFile:
fs.appendFile(Path, rawData, function (err) {
  if (err) throw err;
  console.log('Chunk saved!');
});

How to wait for a URL callback before sending an HTTP response in Koa?

I have a Koa router in which I need to call an API that returns its result asynchronously. This means I cannot get the result immediately; the API will call my callback URL when it's ready. But I have to use it like a sync API, which means I have to wait until the callback URL is called.
My router looks like this:
router.post("/voice", async (ctx, next) => {
// call a API here
const params = {
data: "xxx",
callback_url: "http//myhost/ret_callback",
};
const req = new Request("http://xxx/api", {
method: "POST",
body: JSON.stringify(params),
});
const resp = await fetch(req);
const data = await resp.json();
// data here is not the result I want, this api just return a task id, this api will call my url back
const taskid = data.taskid;
// now I want to wait here until I got "ret_callback"
// .... wait .... wait
// "ret_callback" is called now
// get the answer in "ret_callback"
ctx.body = {
result: "ret_callback result here",
}
})
My callback URL looks like this:
router.post("/ret_callback", async (ctx, next) => {
const params = ctx.request.body;
// taskid will tell me this answer to which question
const taskid = params.taskid;
// this is exactly what I want
const result = params.text;
ctx.body = {
code: 0,
message: "success",
};
})
So how can I make this async API act like a sync API?
Just pass a resolve() to another function. For example, you can do it this way:
// use a map to store the pending resolve() callbacks
const taskMap = new Map();

router.post("/voice", async (ctx, next) => {
  // call the API here
  const params = {
    data: "xxx",
    callback_url: "http//myhost/ret_callback",
  };
  const req = new Request("http://xxx/api", {
    method: "POST",
    body: JSON.stringify(params),
  });
  const resp = await fetch(req);
  const data = await resp.json();
  const result = await waitForCallback(data.taskid);
  ctx.body = {
    result,
  };
});

const waitForCallback = (taskId) => {
  return new Promise((resolve, reject) => {
    const task = {};
    task.id = taskId;
    task.onComplete = (data) => {
      resolve(data);
    };
    task.onError = () => {
      reject();
    };
    taskMap.set(task.id, task);
  });
};
router.post("/ret_callback", async (ctx, next) => {
const params = ctx.request.body;
// taskid will tell me this answer to which question
const taskid = params.taskid;
// this is exactly what I want
const result = params.text;
// here you continue the waiting response
taskMap.get(taskid).onComplete(result);
// not forget to clean rubbish
taskMap.delete(taskid);
ctx.body = {
code: 0,
message: "success",
}; })
I didn't test it but I think it will work.
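One gap worth noting in the approach above: if the external service never hits ret_callback, the taskMap entry leaks and the client request hangs forever. A small extension of the same waitForCallback adds a timeout (the 30-second default here is an arbitrary choice for illustration):

const waitForCallback = (taskId, timeoutMs = 30000) => {
  return new Promise((resolve, reject) => {
    // Fail the request and drop the entry if the callback never arrives.
    const timer = setTimeout(() => {
      taskMap.delete(taskId);
      reject(new Error(`task ${taskId} timed out`));
    }, timeoutMs);
    taskMap.set(taskId, {
      id: taskId,
      onComplete: (data) => {
        clearTimeout(timer);
        resolve(data);
      },
      onError: (err) => {
        clearTimeout(timer);
        reject(err);
      },
    });
  });
};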
const https = require('https');

function getMovieTitles(substr) {
  let movies = [];
  let fdata = (page, search, totalPage) => {
    let mpath = {
      host: "jsonmock.hackerrank.com",
      path: "/api/movies/search/?Title=" + search + "&page=" + page,
    };
    let raw = '';
    https.get(mpath, (res) => {
      res.on("data", (chunk) => {
        raw += chunk;
      });
      res.on("end", () => {
        let tdata = JSON.parse(raw);
        totalPage(tdata);
      });
    });
  };
  fdata(1, substr, (t) => {
    for (let i = 1; i <= parseInt(t.total_pages); i++) {
      fdata(i, substr, (t) => {
        t.data.forEach((v, index, arrs) => {
          movies.push(v.Title);
          if (index === arrs.length - 1) {
            movies.sort();
            if (parseInt(t.page) === parseInt(t.total_pages)) {
              movies.forEach(v => {
                console.log(v);
              });
            }
          }
        });
      });
    }
  });
}
getMovieTitles("tom")
Okay, so first of all, this should not be a goal for you; Node.js works better async.
However, let us assume that you still want it for some reason, so take a look at the sync-request package on npm (there is a huge note on there that you should not use this in production).
But I hope you mean to make this API simpler (as in a one-call kind of thing). You would still need .then or await, but it would be one call anyway.
If that is the case, please comment on this answer and I can write up a possible method I use myself.
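For reference, a minimal sketch of what sync-request usage looks like (the URL is a placeholder; the call blocks the event loop, which is exactly why its docs warn against production use):

const request = require('sync-request');

// Blocks until the response arrives — acceptable in a one-off script,
// a bad idea inside a server route.
const res = request('GET', 'https://example.com/api');
const body = res.getBody('utf8'); // getBody throws for error status codes
console.log(body);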
How about this?
router.post("/voice", async (ctx, next) => {
  const params = {
    data: "xxx",
    callback_url: "http//myhost/ret_callback",
  };
  const req = new Request("http://xxx/api", {
    method: "POST",
    body: JSON.stringify(params),
  });
  const resp = await fetch(req);
  const data = await resp.json();
  // data here is not the result I want; this API just returns a task id and will call my URL back
  const taskid = data.taskid;
  let response = null;
  try {
    response = await new Promise((resolve, reject) => {
      // resolve() with the response when ret_callback fires, or reject() with the error if it fails
    });
  } catch (err) {
    // handle errors
  }
  // get the answer in "ret_callback"
  ctx.body = {
    result: "ret_callback result here",
  };
});

How to make an HTTP request in Cloud Functions for Firebase?

I am trying to make a call to Apple's receipt verification server using Cloud Functions for Firebase. Any idea how to make an HTTP call?
Keep in mind that your dependency footprint will affect deployment and cold-start times. Here's how I use https.get() and functions.config() to ping other Functions-backed endpoints. You can use the same approach when calling third-party services as well.
const functions = require('firebase-functions');
const https = require('https');
const info = functions.config().info;

exports.cronHandler = functions.pubsub.topic('minutely-tick').onPublish((event) => {
  return new Promise((resolve, reject) => {
    const hostname = info.hostname;
    const pathname = info.pathname;
    let data = '';
    const request = https.get(`https://${hostname}${pathname}`, (res) => {
      res.on('data', (d) => {
        data += d;
      });
      res.on('end', resolve);
    });
    request.on('error', reject);
  });
});
This answer is copied from the OP's edit to the question. The OP solved it using https://github.com/request/request:
var jsonObject = {
  'receipt-data': receiptData,
  password: functions.config().apple.iappassword
};
var jsonData = JSON.stringify(jsonObject);
var firebaseRef = '/' + fbRefHelper.getUserPaymentInfo(currentUser);
let url = "https://sandbox.itunes.apple.com/verifyReceipt"; // or production

request.post({
  headers: {
    'content-type': 'application/x-www-form-urlencoded'
  },
  url: url,
  body: jsonData
}, function (error, response, body) {
  if (error) {
  } else {
    var jsonResponse = JSON.parse(body);
    if (jsonResponse.status === 0) {
      console.log('Receipt Valid!');
    } else {
      console.log('Receipt Invalid!');
    }
    if (jsonResponse.status === 0 && jsonResponse.environment !== 'Sandbox') {
      console.log('Response is in Production!');
    }
    console.log('Done.');
  }
});
This mostly uses https://nodejs.org/api/https.html:
const http = require("http");
const https = require('https');

const mHostname = 'www.yourdomain.info';
const mPath = '/path/file.php?mode=markers';
const options = {
  hostname: mHostname,
  port: 80, // should be 443 if https
  path: mPath,
  method: 'GET',
  headers: {
    'Content-Type': 'application/json' // or 'application/json; charset=utf-8'
  }
};
var rData = "";
const req0 = http.request(options, (res0) => {
  res0.setEncoding('utf8');
  res0.on('data', (d) => {
    rData += d;
  });
  res0.on('end', function () {
    console.log("got pack");
    res.send("ok"); // `res` here is the response object of the surrounding function handler
  });
}).on('error', (e) => {
  const err = "Got error:" + e.message;
  res.send(err);
});
req0.write("body"); // write the request body
req0.end(); // the request is only sent once end() is called
