Unable to update object data using sails.config.bootstrap - node.js

I'm unable to update an object upon lifting a sails app. I think something is wrong with scope but I'm not sure.
config/cron.js
const updaterService = require('../api/services/updater');

module.exports.cron = {
  Update: {
    schedule: '0 * * * * *',
    onTick: function () {
      console.log('You will see this every second');
      updaterService.updateObj();
    }
  }
};
api/services/updater.js
const rp = require('request-promise');

// updateOpts (the request options for the update endpoint) is assumed to be defined elsewhere.
const testObj = {
  data: '',
};

const showData = () => {
  sails.log.info(testObj);
};

const updateObj = async () => {
  try {
    const updateResponse = await rp(updateOpts);
    testObj.data = updateResponse.body.data;
    sails.log.info(testObj);
  } catch (updateErr) {
    sails.log.error(`${updateErr.statusCode}: ${updateErr}`);
  }
};

// Exported so config/cron.js can call updateObj.
module.exports = { updateObj, showData };
When updateObj is called from config/cron, the object looks updated (first line below), but when I call showData() afterwards there is no update to the object (second line):
{ data: 'some data here' }
{ data: '' }

Firstly, let's look at services in SailsJS, particularly in versions < 1, where they have since been replaced by helpers.
To create a service, there is a strict file-naming convention: make sure your service's filename ends in Service.js. The first part of the filename becomes the globally-accessible variable name for the service.
In your example, this would mean renaming the file to UpdaterService.js.
Next, services within Sails export reusable functionality. In your example, this will most likely be your updateObj function. I'm not sure why you are using request-promise for a scheduled (batch) job, so for simplicity I will just use request. If it is not already installed, you will need to install it: npm install request.
Below is a complete example service; I've cut out the parts of your code that did not make sense to me.
UpdaterService.js
const request = require('request');

module.exports = {
  updateObj: () => {
    // I am assuming updateOpts is some URL with serialized params, so let's just call it url
    let url = 'https://www.example.com?foo=bar';
    // json: true so the response body is parsed into an object for us
    request({ url: url, json: true }, function (error, response, body) {
      if (error) {
        sails.log.error(error);
      }
      if (body && body.data) {
        sails.log.info(body.data);
      }
    });
  }
};
And here is an example cron file calling the above service every day at 12 midday. There is no need to require the service, as it is automatically globally accessible when named correctly.
config/cron.js
module.exports.cron = {
  dailyScheduler: {
    schedule: '0 0 12 * * *',
    onTick: function() {
      UpdaterService.updateObj();
    }
  }
};
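For completeness: in Sails v1, services have been superseded by helpers. A rough sketch of the same thing as a helper (the file name and URL here are my assumptions, not part of the original code):

// api/helpers/update-obj.js (hypothetical name)
const request = require('request');

module.exports = {
  friendlyName: 'Update obj',
  description: 'Fetch fresh data and log it.',
  fn: async function (inputs, exits) {
    // Same assumed URL as in the service example above.
    request({ url: 'https://www.example.com?foo=bar', json: true }, (error, response, body) => {
      if (error) { return exits.error(error); }
      if (body && body.data) { sails.log.info(body.data); }
      return exits.success();
    });
  }
};

The cron tick would then call it with await sails.helpers.updateObj();.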

Related

Node script runs fine outside of lambda but not inside

I have a fairly straightforward script that reads summary data from an API and then loops through the records to save the detail to a database.
The code runs without problems when I launch it from VS Code but when I move it into a Lambda function it only runs halfway through.
There are two api calls using axios. The first gets the summary and the second pulls the detail.
The first call works in Lambda. The second, which uses the same method, does not. I can tell through logging statements that the correct data is getting to the second method. The only real differences are that the second is in a loop and it also uses Bottleneck to prevent overloading a touchy api.
I have put logging statements all over the place but once the routine enters the second api call I get no response at all. The logging statement directly inside the routine shows that it is getting there but I don't get anything back from axios. No success or error.
Here is the code.
var Bottleneck = require("bottleneck");
const axios = require('axios');
const Sequelize = require('sequelize');
const log = require('lambda-log'); // logging via Lambda-Log, since console.log was not showing up

let apiKey = process.env.APIKEY;
var timeDelay = 1000;

const instance = axios.create({
  baseURL: 'https://anapi.com/api/v1/',
  headers: {
    'Content-Type': "application/json",
    'X-Auth-Key': apiKey,
  }
});

const limiter = new Bottleneck({
  maxConcurrent: 1,
  minTime: timeDelay
});

const sequelize = new Sequelize(
  "postgres://postgres:reallystrongpassword@awsrdsdb.cluster-vms39sknjssk1.us-west-2.rds.amazonaws.com/targetdatabase"
);

const notes = sequelize.define(
  "notes",
  {
    appointmentid: {
      type: Sequelize.STRING,
    }, ...
    questions: {
      type: Sequelize.JSONB,
    },
  },
  {
    tableName: "notes",
    timestamps: false
  }
);

async function notesInject(detailData) {
  log.info("inside notesInject");
  const injector = await notes.create({
    appointmentid: detailData.AppointmentId,
    ...
    questions: detailData.Questions,
  }).then(function(){
    log.info("created note ", detailData.Id)
  }).catch(function(error){
    log.info(error)
  })
}

function getDetail(detailId) {
  log.info(detailId)
  try {
    instance.get('notes/' + detailId)
      .then((resp) => {
        try {
          var detailData = (resp.data)
        } catch {
          log.info("detailData success", resp.status)
        }
        try {
          notesInject(detailData)
        } catch (error) {
          log.info("notesInject catch", resp.status);
        }
      })
  } catch (error) {
    log.info("error in the detail instance")
  }
}

function procDetail(apiData) {
  for (let i = 0; i < apiData.length; i++) {
    const element = apiData[i];
    let detailId = element.Id;
    getDetail(detailId)
  }
}

function getTodayData() {
  const pullDate = new Date();
  const dateY = pullDate.getFullYear();
  const dateM = pullDate.getMonth()+1;
  const dateD = pullDate.getDate()-1;
  const apiDate = (dateY+'-'+dateM+'-'+dateD)
  try {
    instance.get('notes/summary?startDate=' + apiDate)
      .then((resp) => {
        try {
          var apiData = (resp.data)
        } catch {
          log.info("set apiData", resp.status)
        }
        try {
          procDetail(apiData)
        } catch (error) {
          log.info("saveDetail", resp.status);
        }
      })
  } catch (error) {
    log.info("in the summary instance")
  }
}

exports.handler = async (event) => {
  getTodayData();
};
I was thinking that the problem was with Bottleneck, because that is the most significant difference between the first and second axios calls. But when I isolated the database-write code after the API pull, it had the same behavior: no error and no response.
I'm stumped. I've logged everything I can think of. Lambda doesn't display console.log messages for this, so I've been using Lambda-Log.
I'm sure it's something dumb, but it works just fine when I run it from VS Code.
If anyone has any idea what I'm doing wrong, please let me know.
Sorry if I posted too much code, but I really don't know where the problem is.
Many thanks
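One pattern worth double-checking in code like this (an observation, not a tested fix): the handler is async but never awaits the work kicked off by getTodayData(), so Lambda may freeze the environment as soon as the handler's promise resolves, before the second round of axios calls ever fires. A sketch of keeping all the work on the handler's promise chain, reusing instance, limiter, and notesInject from the code above:

exports.handler = async (event) => {
  const resp = await instance.get('notes/summary?startDate=' + apiDate); // apiDate computed as in getTodayData()
  // Schedule every detail request through the limiter and wait for all of them.
  await Promise.all(
    resp.data.map((element) =>
      limiter.schedule(() =>
        instance.get('notes/' + element.Id).then((detail) => notesInject(detail.data))
      )
    )
  );
};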

How can I share data between 2 resolver functions in Express GraphQL?

I have 2 queries, and corresponding functions, but while writing the resolver I'm not sure how to store the first function's data and then reuse it in the second one. Note: I do not want to call the function again, as it will execute again and make an API call. I just want to use it like a session or global state in Express. Here's the code:
const resolvers = {
  getStudent: async ({ id }) => {
    const resp = await service(id);
    return resp;
  },
  courseDetails: () => {
    console.log(resp); // I want to access the resp object from the function above, but I don't want to call getStudent again
  }
};
I tried context but didn't work.
You can implement a simple in-memory store.
By storing the Promise and returning it you won't need to worry about multiple requests to the same resources.
const got = require('got');
const assert = require('assert');

function studentServiceFactory(options = {}) {
  const TTL = options.ttl || 1000 * 60 * 5; // default 5 minute TTL (in ms)
  const BASE_API = "https://swapi.dev/api";
  const store = {};
  return {
    get: ({ id }) => {
      if (!store[id] || store[id].timestamp + TTL < Date.now()) {
        // store the promise
        store[id] = {
          promise: got(`${BASE_API}/people/${id}`),
          timestamp: Date.now(),
        };
        console.log(`${BASE_API}/people/${id}`);
      }
      return store[id].promise;
    }
  }
}

const studentService = studentServiceFactory({ ttl: 1000 });

const resolvers = {
  studentService: studentService,
};

// test program
(async () => {
  const request1 = await resolvers.studentService.get({ id: 1 });
  const request2 = await resolvers.studentService.get({ id: 1 });
  // Both calls will return the same promise.
  assert.equal(request1, request2);
  // wait for resources to get stale
  setTimeout(async () => {
    const request3 = await resolvers.studentService.get({ id: 1 });
    assert.notEqual(request1, request3);
  }, 3000);
})();
Two requests are independent of each other. The only way to share data between two requests is to persist the data somewhere. It can be a file, database, etc. In your case, you can simply call the service function again in the other resolver.
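If both resolvers run within the same request, another option is a per-request cache on the GraphQL context. A minimal sketch (the context wiring and the service call are assumptions based on the question):

const resolvers = {
  getStudent: async ({ id }, context) => {
    if (!context.cache.student) {
      context.cache.student = await service(id); // service as in the question
    }
    return context.cache.student;
  },
  courseDetails: async (args, context) => {
    // Reuses the cached response instead of calling service(id) again.
    return context.cache.student;
  },
};

// When mounting express-graphql, build a fresh cache per request:
// app.use('/graphql', graphqlHTTP(() => ({ schema, rootValue: resolvers, context: { cache: {} } })));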

How to load JS file in another JS file in Node

I cannot load a JS file in my app (I get undefined), and I want to emulate the same effect as the <script> tag in plain HTML.
I have tried:
import Api from './api' -> tells me that none of the defined functions is a function (I don't have any circular dependencies), so my best guess is that Api was not initialized or something.
Tried module.exports on Api -> tells me that Api is undefined.
Tried exports.Api -> tells me that the function I try to call from the Api is not a function.
I tried require and a few more things which I cannot even recall, and none of it seems to be working. The main issue is that I don't recognize the format of the JS file in question, since I've never seen a variable declared as a function that contains other functions, so an explanation of that might come in handy, to be honest.
var Api = (function() {
  var requestPayload;
  var responsePayload;
  var messageEndpoint = '/api/message';
  var sessionEndpoint = '/api/session';
  var sessionId = null;

  // Publicly accessible methods defined
  return {
    sendRequest: sendRequest,
    getSessionId: getSessionId,
    // The request/response getters/setters are defined here to prevent internal methods
    // from calling the methods without any of the callbacks that are added elsewhere.
    getRequestPayload: function() {
      return requestPayload;
    },
    setRequestPayload: function(newPayloadStr) {
      requestPayload = JSON.parse(newPayloadStr);
    },
    getResponsePayload: function() {
      return responsePayload;
    },
    setResponsePayload: function(newPayloadStr) {
      responsePayload = JSON.parse(newPayloadStr);
    },
    setErrorPayload: function() {
    }
  };

  function getSessionId(callback) {
    var http = new XMLHttpRequest();
    http.open('GET', sessionEndpoint, true);
    http.setRequestHeader('Content-type', 'application/json');
    http.onreadystatechange = function () {
      if (http.readyState === XMLHttpRequest.DONE) {
        var res = JSON.parse(http.responseText);
        sessionId = res.session_id;
        callback();
      }
    };
    http.send();
  }

  // Send a message request to the server
  function sendRequest(text, context) {
    // Build request payload
    var payloadToWatson = {
      session_id: sessionId
    };
    payloadToWatson.input = {
      message_type: 'text',
      text: text,
    };
    if (context) {
      payloadToWatson.context = context;
    }

    // Built http request
    var http = new XMLHttpRequest();
    http.open('POST', messageEndpoint, true);
    http.setRequestHeader('Content-type', 'application/json');
    http.onreadystatechange = function() {
      if (http.readyState === XMLHttpRequest.DONE && http.status === 200 && http.responseText) {
        Api.setResponsePayload(http.responseText);
      } else if (http.readyState === XMLHttpRequest.DONE && http.status !== 200) {
        Api.setErrorPayload({
          'output': {
            'generic': [
              {
                'response_type': 'text',
                'text': 'Something went wrong.'
              }
            ],
          }
        });
      }
    };

    var params = JSON.stringify(payloadToWatson);
    // Stored in variable (publicly visible through Api.getRequestPayload)
    // to be used throughout the application
    if (Object.getOwnPropertyNames(payloadToWatson).length !== 0) {
      Api.setRequestPayload(params);
    }
    http.send(params);
  }
}());
The code above is provided by IBM (for the Watson Assistant I am trying to work with), and the code is for a Node.js application which works fine.
It works because the code above is simply included in the app through a <script> tag in their index.html, and voila, it works, but I don't have that ability (read below).
My issue is that their app is also a client app, and I want to transfer all of that 'back-end' stuff to my REST API, and that is why I am trying to use the code above.
var Api = (function() {
  var messageEndpoint = "/api/message";

  // Publicly accessible methods defined
  return {
    messageEndpoint: messageEndpoint
  };
})();

module.exports = Api;

And you can use it like:

const api = require("./api");
console.log(api);

So basically, just add module.exports = Api; at the end of the api file and you will be able to use it.
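One caveat worth adding (my observation, not part of the original answer): the IBM snippet relies on XMLHttpRequest, which does not exist in Node, so once the file is required server-side you would also need a polyfill such as the xmlhttprequest npm package, wired up roughly like this:

// top of api.js, before the IIFE (hypothetical wiring; npm install xmlhttprequest)
var XMLHttpRequest = require('xmlhttprequest').XMLHttpRequest;

var Api = (function() {
  // ... the IBM code from above, unchanged ...
}());

module.exports = Api;

Note that the relative endpoints ('/api/message', '/api/session') would then need to become absolute URLs, since there is no page origin to resolve them against in Node.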

Historian for a particular participant

Is there any way in which I can get Historian for a particular participant in hyperledger-composer using node API?
I am developing an application based on hyperledger-composer using Node APIs. I want to show the history of transactions of a particular participant in his/her profile. I have created the permission.acl for that, and that works fine in playground. But when I access the historian from the Node API, it returns the complete historian of the network. I don't know how to filter that for a particular participant.
You can return results from REST API calls to the calling client application since v0.20, so something like the following would work (not tested, but you get the idea). NOTE: you could just call the REST API endpoint (/GET Trader) directly via REST with your parameter (or whatever endpoints you create for your own business network - the example below is trade-network), rather than the 'READ-ONLY' transaction processor endpoint described below, for returning larger result sets to your client application. See more on this in the docs.
NODE JS Client using APIs:
const BusinessNetworkConnection = require('composer-client').BusinessNetworkConnection;
const rp = require('request-promise');

this.bizNetworkConnection = new BusinessNetworkConnection();
this.cardName = 'admin@mynet';
this.businessNetworkIdentifier = 'mynet';

this.bizNetworkConnection.connect(this.cardName)
  .then((result) => {
    // You can do ANYTHING HERE eg.
  })
  .catch((error) => {
    throw error;
  });

// set up my read-only transaction object - find the history of a particular Participant
// - note it could equally be an Asset instead!
var obj = {
  "$class": "org.example.trading.MyPartHistory",
  "tradeId": "P1"
};

async function callPartHistory() {
  var options = {
    method: 'POST',
    uri: 'http://localhost:3000/api/MyPartHistory',
    body: obj,
    json: true
  };
  let results = await rp(options);
  // console.log("Return value from REST API is " + results);
  console.log(" ");
  console.log(`PARTICIPANT HISTORY for Asset ID: ${results[0].tradeId} is: `);
  console.log("=============================================");
  for (const part of results) {
    console.log(`${part.tradeId} ${part.name}`);
  }
}

// Main
callPartHistory();
MODEL FILE
@commit(false)
@returns(Trader[])
transaction MyPartHistory {
  o String tradeId
}
READ-ONLY TRANSACTION PROCESSOR CODE (in 'logic.js'):
/**
 * Sample read-only transaction
 * @param {org.example.trading.MyPartHistory} tx
 * @returns {org.example.trading.Trader[]} All trxns
 * @transaction
 */
async function participantHistory(tx) {
  const partId = tx.tradeId;
  const nativeSupport = tx.nativeSupport;
  // const partRegistry = await getParticipantRegistry('org.example.trading.Trader')
  const nativeKey = getNativeAPI().createCompositeKey('Asset:org.example.trading.Trader', [partId]);
  const iterator = await getNativeAPI().getHistoryForKey(nativeKey);
  let results = [];
  let res = { done: false };
  while (!res.done) {
    res = await iterator.next();
    if (res && res.value && res.value.value) {
      let val = res.value.value.toString('utf8');
      if (val.length > 0) {
        console.log("#debug val is " + val);
        results.push(JSON.parse(val));
      }
    }
    if (res && res.done) {
      try {
        iterator.close();
      } catch (err) {
      }
    }
  }
  var newArray = [];
  for (const item of results) {
    newArray.push(getSerializer().fromJSON(item));
  }
  console.log("#debug the results to be returned are as follows: ");
  return newArray; // returns something to my NodeJS client (called via REST API)
}

Make Requests in Sequential Order Node.js

If I need to call 3 HTTP APIs in sequential order, what would be a better alternative to the following code:
http.get({ host: 'www.example.com', path: '/api_1.php' }, function(res) {
  res.on('data', function(d) {
    http.get({ host: 'www.example.com', path: '/api_2.php' }, function(res) {
      res.on('data', function(d) {
        http.get({ host: 'www.example.com', path: '/api_3.php' }, function(res) {
          res.on('data', function(d) {
          });
        });
      });
    });
  });
});
Using deferreds like Futures.
var sequence = Futures.sequence();

sequence
  .then(function(next) {
    http.get({}, next);
  })
  .then(function(next, res) {
    res.on("data", next);
  })
  .then(function(next, d) {
    http.get({}, next);
  })
  .then(function(next, res) {
    ...
  })
If you need to pass scope along, then just do something like this:
.then(function(next, d) {
  http.get({}, function(res) {
    next(res, d);
  });
})
.then(function(next, res, d) { })
...
I like Raynos' solution as well, but I prefer a different flow control library.
https://github.com/caolan/async
Depending on whether you need the results in each subsequent function, I'd either use series, parallel, or waterfall.
Series when they have to be serially executed, but you don't necessarily need the results in each subsequent function call.
Parallel if they can be executed in parallel, you don't need the results from each during each parallel function, and you need a callback when all have completed.
Waterfall if you want to morph the results in each function and pass them to the next (see the sketch after the example below).
var endpoints = [
  { host: 'www.example.com', path: '/api_1.php' },
  { host: 'www.example.com', path: '/api_2.php' },
  { host: 'www.example.com', path: '/api_3.php' }
];

async.mapSeries(endpoints, http.get, function(err, results) {
  // Array of results
});
sync-request
By far the easiest one I've found and used is sync-request, and it supports both Node and the browser!
var request = require('sync-request');
var res = request('GET', 'http://google.com');
console.log(res.body.toString('utf-8'));
That's it: no crazy configuration, no complex lib installs, it just works (although it does have a lib fallback). I've tried other examples here and was stumped when there was much extra setup to do or when installs didn't work!
Notes:
The example that sync-request gives doesn't play nice when you use res.getBody(); all getBody() does is accept an encoding and convert the response data. Just do res.body.toString(encoding) instead.
You could do this using my Common Node library:
function get(url) {
  return new (require('httpclient').HttpClient)({
    method: 'GET',
    url: url
  }).finish().body.read().decodeToString();
}

var a = get('www.example.com/api_1.php'),
    b = get('www.example.com/api_2.php'),
    c = get('www.example.com/api_3.php');
I'd use a recursive function with a list of apis
var APIs = [ '/api_1.php', '/api_2.php', '/api_3.php' ];
var host = 'www.example.com';

function callAPIs(host, APIs) {
  var API = APIs.shift();
  http.get({ host: host, path: API }, function(res) {
    var body = '';
    res.on('data', function (d) {
      body += d;
    });
    res.on('end', function () {
      if (APIs.length) {
        callAPIs(host, APIs);
      }
    });
  });
}

callAPIs(host, APIs);
edit: request version
var request = require('request');
var APIs = [ '/api_1.php', '/api_2.php', '/api_3.php' ];
var host = 'www.example.com';

APIs = APIs.map(function (api) {
  return 'http://' + host + api;
});

function callAPIs(host, APIs) {
  var API = APIs.shift();
  request(API, function(err, res, body) {
    if (APIs.length) {
      callAPIs(host, APIs);
    }
  });
}

callAPIs(host, APIs);
edit: request/async version
var request = require('request');
var async = require('async');

var APIs = [ '/api_1.php', '/api_2.php', '/api_3.php' ];
var host = 'www.example.com';

APIs = APIs.map(function (api) {
  return 'http://' + host + api;
});

async.eachSeries(APIs, function (API, cb) {
  request(API, function (err, res, body) {
    cb(err);
  });
}, function (err) {
  // called when all done, or an error occurs
});
As of 2018, using ES6 modules and Promises, we can write a function like this:
import { get } from 'http';

export const fetch = (url) => new Promise((resolve, reject) => {
  get(url, (res) => {
    let data = '';
    res.on('end', () => resolve(data));
    res.on('data', (buf) => data += buf.toString());
  })
    .on('error', e => reject(e));
});
and then in another module
let data;
data = await fetch('http://www.example.com/api_1.php');
// do something with data...
data = await fetch('http://www.example.com/api_2.php');
// do something with data
data = await fetch('http://www.example.com/api_3.php');
// do something with data
The code needs to be executed in an asynchronous context (using the async keyword).
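For instance, a minimal wrapper using an async IIFE (top-level await was not generally available at the time):

// Wrap the sequential calls from above in an async function and invoke it.
(async () => {
  let data;
  data = await fetch('http://www.example.com/api_1.php');
  // do something with data...
  data = await fetch('http://www.example.com/api_2.php');
  data = await fetch('http://www.example.com/api_3.php');
})().catch(console.error);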
Another possibility is to set up a callback that tracks completed tasks:
function onApiResults(requestId, response, results) {
  requestsCompleted |= requestId;

  switch (requestId) {
    case REQUEST_API1:
      ...
      [Call API2]
      break;
    case REQUEST_API2:
      ...
      [Call API3]
      break;
    case REQUEST_API3:
      ...
      break;
  }

  if (requestsCompleted == requestsNeeded)
    response.end();
}
Then simply assign an ID to each and you can set up your requirements for which tasks must be completed before closing the connection.
const REQUEST_API1 = 0x01;
const REQUEST_API2 = 0x02;
const REQUEST_API3 = 0x04;
const requestsNeeded = REQUEST_API1 | REQUEST_API2 | REQUEST_API3;
Okay, it's not pretty. It is just another way to make sequential calls. It's unfortunate that NodeJS does not provide the most basic synchronous calls. But I understand what the lure of asynchronicity is.
It seems the solutions for this problem are never-ending; here's one more :)
// do it once.
sync(fs, 'readFile')
// now use it anywhere in both sync or async ways.
var data = fs.readFile(__filename, 'utf8')
http://alexeypetrushin.github.com/synchronize
use sequenty.
sudo npm install sequenty
or
https://github.com/AndyShin/sequenty
very simple.
var sequenty = require('sequenty');

function f1(cb) // cb: callback by sequenty
{
  console.log("I'm f1");
  cb(); // please call this after finished
}

function f2(cb)
{
  console.log("I'm f2");
  cb();
}

sequenty.run([f1, f2]);
You can also use a loop like this:
var f = [];
var queries = [ "select .. blah blah", "update blah blah", ... ];

for (var i = 0; i < queries.length; i++)
{
  f[i] = function(cb, funcIndex) // sequenty gives you cb and funcIndex
  {
    db.query(queries[funcIndex], function(err, info)
    {
      cb(); // must be called
    });
  }
}

sequenty.run(f); // fire!
Using the request library can help minimize the cruft:
var request = require('request')

request({ uri: 'http://api.com/1' }, function(err, response, body){
  // use body
  request({ uri: 'http://api.com/2' }, function(err, response, body){
    // use body
    request({ uri: 'http://api.com/3' }, function(err, response, body){
      // use body
    })
  })
})
But for maximum awesomeness you should try some control-flow library like Step - it will also allow you to parallelize requests, assuming that it's acceptable:
var request = require('request')
var Step = require('step')

// request returns body as 3rd argument
// we have to move it so it works with Step :(
request.getBody = function(o, cb){
  request(o, function(err, resp, body){
    cb(err, body)
  })
}

Step(
  function getData(){
    request.getBody({ uri: 'http://api.com/?method=1' }, this.parallel())
    request.getBody({ uri: 'http://api.com/?method=2' }, this.parallel())
    request.getBody({ uri: 'http://api.com/?method=3' }, this.parallel())
  },
  function doStuff(err, r1, r2, r3){
    console.log(r1, r2, r3)
  }
)
There are lots of control flow libraries -- I like conseq (... because I wrote it.) Also, on('data') can fire several times, so use a REST wrapper library like restler.
Seq()
  .seq(function () {
    rest.get('http://www.example.com/api_1.php').on('complete', this.next);
  })
  .seq(function (d1) {
    this.d1 = d1;
    rest.get('http://www.example.com/api_2.php').on('complete', this.next);
  })
  .seq(function (d2) {
    this.d2 = d2;
    rest.get('http://www.example.com/api_3.php').on('complete', this.next);
  })
  .seq(function (d3) {
    // use this.d1, this.d2, d3
  })
This has been answered well by Raynos. Yet there have been changes in the sequence library since that answer was posted.
To get sequence working, follow this link: https://github.com/FuturesJS/sequence/tree/9daf0000289954b85c0925119821752fbfb3521e.
This is how you can get it working after npm install sequence:

var seq = require('sequence').Sequence;
var sequence = seq.create();

sequence.then(firstFunction).then(secondFunction);
Here's my version of @andy-shin's sequenty with arguments in an array instead of an index:

function run(funcs, args) {
  var i = 0;
  var recursive = function() {
    funcs[i](function() {
      i++;
      if (i < funcs.length)
        recursive();
    }, args[i]);
  };
  recursive();
}
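For illustration, a hypothetical usage of run (the step functions and arguments are made up):

// Each function receives (done, arg); calling done() advances to the next one.
function stepA(done, arg) {
  console.log('A got', arg);
  done();
}

function stepB(done, arg) {
  console.log('B got', arg);
  done();
}

run([stepA, stepB], ['first-arg', 'second-arg']);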
...4 years later...
Here is an original solution with the framework Danf (you don't need any code for this kind of thing, only some config):
// config/common/config/sequences.js
'use strict';

module.exports = {
  executeMySyncQueries: {
    operations: [
      {
        order: 0,
        service: 'danf:http.router',
        method: 'follow',
        arguments: [
          'www.example.com/api_1.php',
          'GET'
        ],
        scope: 'response1'
      },
      {
        order: 1,
        service: 'danf:http.router',
        method: 'follow',
        arguments: [
          'www.example.com/api_2.php',
          'GET'
        ],
        scope: 'response2'
      },
      {
        order: 2,
        service: 'danf:http.router',
        method: 'follow',
        arguments: [
          'www.example.com/api_3.php',
          'GET'
        ],
        scope: 'response3'
      }
    ]
  }
};
Use the same order value for operations you want to be executed in parallel.
If you want to be even more concise, you can use a collection process:
// config/common/config/sequences.js
'use strict';

module.exports = {
  executeMySyncQueries: {
    operations: [
      {
        service: 'danf:http.router',
        method: 'follow',
        // Process the operation on each item
        // of the following collection.
        collection: {
          // Define the input collection.
          input: [
            'www.example.com/api_1.php',
            'www.example.com/api_2.php',
            'www.example.com/api_3.php'
          ],
          // Define the async method used.
          // You can specify any collection method
          // of the async lib.
          // '--' is a shortcut for 'forEachOfSeries'
          // which is an execution in series.
          method: '--'
        },
        arguments: [
          // Resolve reference '##.##' in the context
          // of the input item.
          '##.##',
          'GET'
        ],
        // Set the responses in the property 'responses'
        // of the stream.
        scope: 'responses'
      }
    ]
  }
};
Take a look at the overview of the framework for more informations.
I landed here because I needed to rate-limit http.request (~10k aggregation queries to elastic search to build an analytical report). The following just choked my machine.
for (item in set) {
  http.request(... + item + ...);
}
My URLs are very simple, so this may not apply trivially to the original question, but I think it's both potentially applicable and worth writing here for readers who land here with issues similar to mine and who want a trivial, JavaScript, no-library solution.
My job wasn't order dependent, and my first approach to bodging this was to wrap it in a shell script to chunk it (because I'm new to JavaScript). That was functional but not satisfactory. My JavaScript resolution in the end was the following:
var stack = [];
stack.push('BOTTOM');

function get_top() {
  var top = stack.pop();
  if (top != 'BOTTOM')
    collect(top);
}

function collect(item) {
  http.request( ... + item + ...
    result.on('end', function() {
      ...
      get_top();
    });
  );
}

for (item in set) {
  stack.push(item);
}

get_top();
It looks like mutual recursion between collect and get_top. I'm not sure it is in effect, because the system is asynchronous and the function collect completes with a callback stashed for the on('end') event.
I think it is general enough to apply to the original question. If, like my scenario, the sequence/set is known, all URLs/keys can be pushed on the stack in one step. If they are calculated as you go, the on('end' function can push the next url on the stack just before get_top(). If anything, the result has less nesting and might be easier to refactor when the API you're calling changes.
I realise this is effectively equivalent to @generalhenry's simple recursive version above (so I upvoted that!)
Super Request
This is another synchronous-style module that is based on request and uses promises. Super simple to use; works well with mocha tests.
npm install super-request
request("http://domain.com")
  .post("/login")
  .form({ username: "username", password: "password" })
  .expect(200)
  .expect({ loggedIn: true })
  .end() // this request is done
  // now start a new one in the same session
  .get("/some/protected/route")
  .expect(200, { hello: "world" })
  .end(function(err){
    if (err) {
      throw err;
    }
  });
This code can be used to execute an array of promise-returning tasks sequentially, after which you can execute your final code in the .then() call.
const allTasks = [() => promise1, () => promise2, () => promise3];

function executePromisesSync(tasks) {
  return tasks.reduce((task, nextTask) => task.then(nextTask), Promise.resolve());
}

executePromisesSync(allTasks).then(
  result => console.log(result),
  error => console.error(error)
);
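For a concrete (hypothetical) shape of allTasks, each task defers its request until the previous one resolves:

// Hypothetical helpers: the URLs are the same placeholder endpoints used in earlier answers.
const https = require('https');

const fetchUrl = (url) => new Promise((resolve, reject) => {
  https.get(url, (res) => {
    let data = '';
    res.on('data', (chunk) => data += chunk);
    res.on('end', () => resolve(data));
  }).on('error', reject);
});

const allTasks = [
  () => fetchUrl('https://www.example.com/api_1.php'),
  () => fetchUrl('https://www.example.com/api_2.php'),
  () => fetchUrl('https://www.example.com/api_3.php'),
];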
I actually got exactly what you (and I) wanted, without the use of await, Promises, or inclusion of any (external) library (except our own).
Here's how to do it:
We're going to make a C++ module to go with Node.js, and that C++ module function will make the HTTP request and return the data as a string, which you can use directly by doing:
var myData = newModule.get(url);
ARE YOU READY to get started?
Step 1:
make a new folder somewhere else on your computer; we're only using this folder to build the .node module file (compiled from C++), and you can move it later.
In the new folder (I put mine in mynewFolder/src for organize-ness):
npm init
then
npm install node-gyp -g
now make 2 new files:
One called something.cpp; put this code in it (or modify it if you want):
#pragma comment(lib, "urlmon.lib")

#include <sstream>
#include <WTypes.h>
#include <node.h>
#include <urlmon.h>
#include <iostream>

using namespace std;
using namespace v8;

Local<Value> S(const char* inp, Isolate* is) {
  return String::NewFromUtf8(
    is,
    inp,
    NewStringType::kNormal
  ).ToLocalChecked();
}

Local<Value> N(double inp, Isolate* is) {
  return Number::New(
    is,
    inp
  );
}

const char* stdStr(Local<Value> str, Isolate* is) {
  String::Utf8Value val(is, str);
  return *val;
}

double num(Local<Value> inp) {
  return inp.As<Number>()->Value();
}

Local<Value> str(Local<Value> inp) {
  return inp.As<String>();
}

Local<Value> get(const char* url, Isolate* is) {
  IStream* stream;
  HRESULT res = URLOpenBlockingStream(0, url, &stream, 0, 0);

  char buffer[100];
  unsigned long bytesReadSoFar;
  stringstream ss;
  stream->Read(buffer, 100, &bytesReadSoFar);
  while (bytesReadSoFar > 0U) {
    ss.write(buffer, (long long) bytesReadSoFar);
    stream->Read(buffer, 100, &bytesReadSoFar);
  }
  stream->Release();

  const string tmp = ss.str();
  const char* cstr = tmp.c_str();
  return S(cstr, is);
}

void Hello(const FunctionCallbackInfo<Value>& arguments) {
  cout << "Yo there!!" << endl;

  Isolate* is = arguments.GetIsolate();
  Local<Context> ctx = is->GetCurrentContext();

  const char* url = stdStr(arguments[0], is);
  Local<Value> pg = get(url, is);

  Local<Object> obj = Object::New(is);
  obj->Set(ctx,
    S("result", is),
    pg
  );

  arguments.GetReturnValue().Set(
    obj
  );
}

void Init(Local<Object> exports) {
  NODE_SET_METHOD(exports, "get", Hello);
}

NODE_MODULE(cobypp, Init);
Now make a new file in the same directory called binding.gyp (node-gyp requires this exact filename) and put (something like) this in it:
{
  "targets": [
    {
      "target_name": "cobypp",
      "sources": [ "src/cobypp.cpp" ]
    }
  ]
}
Now in the package.json file, add: "gypfile": true,
Now, in the console, run: node-gyp rebuild
If it goes through the whole command and says "ok" at the end with no errors, you're (almost) good to go; if not, leave a comment.
But if it works, go to build/Release/cobypp.node (or whatever it's called for you), copy it into your main Node.js folder, then in Node.js:
var myCPP = require("./cobypp");
var myData = myCPP.get("http://google.com").result;
console.log(myData);
..
response.end(myData); // or whatever
