How do I get the data from a https request outside of its scope?
Update
I've seen Where is body in a nodejs http.get response?, but it doesn't answer this question. In fact, that question isn't answered accurately, either. In the accepted answer (posted by the asker), a third-party library is used. Since the library returns an object different from the one returned by http.get(), it doesn't answer the question.
I tried to set a variable to the return value of http.get() using await, but that returns an http.ClientRequest and doesn't give me access to the response data that I need.
I'm using Node v8.9.4 with Express and the https module to request data from Google's Custom Search.
I have two routes. One for a GET request and one for a POST request used when submitting a form on the front page. They both basically serve the same purpose... request the data from CSE and present the data as a simple JSON string. Rather than repeat myself, I want to put my code for the CSE request into a function and just call the function within the callback for either route.
I thought about returning all the way up from the innermost callback, but that won't work because it wouldn't get to the request's error event handler or the necessary .end() call.
Here's a subset of the actual code:
app.get('/api/imagesearch/:query', newQuery)
app.post('/', newQuery)
function newQuery (req, res) {
let query = req.body.query || req.params.query
console.log(`Search Query: ${query}`)
res.status(200)
res.set('Content-Type', 'application/json')
// This doesn't work
let searchResults = JSON.stringify(cseSearch(req))
res.end(searchResults)
}
function cseSearch (request) {
let cseParams = '' +
`?q=${request.params.query}` +
`&cx=${process.env.CSE_ID}` +
`&key=${process.env.API_KEY}` +
'&num=10' +
'&safe=high' +
'&searchType=image' +
`&start=${request.query.offset || 1}`
let options = {
hostname: 'www.googleapis.com',
path: '/customsearch/v1' + encodeURI(cseParams)
}
let cseRequest = https.request(options, cseResponse => {
let jsonString = ''
let searchResults = []
cseResponse.on('data', data => {
jsonString += data
})
cseResponse.on('end', () => {
let cseResult = JSON.parse(jsonString)
let items = cseResult.items
items.map(item => {
let resultItem = {
url: item.link,
snippet: item.title,
thumbnail: item.image.thumbnailLink,
context: item.image.contextLink
}
searchResults.push(resultItem)
})
// This doesn't work... wrong scope, two callbacks deep
return searchResults
})
})
cseRequest.on('error', e => {
console.log(e)
})
cseRequest.end()
}
If you're curious, it's for a freeCodeCamp project: Image Search Abstraction Layer
Using the Promise method solves this issue:
cseSearch(req).then(searchResults => {
  res.end(JSON.stringify(searchResults))
}).catch(err => {
  console.log(err)
  res.status(500).end()
})
function cseSearch (request) {
return new Promise((resolve, reject)=>{
...your http request code
cseResponse.on('end', () => {
let cseResult = JSON.parse(jsonString)
let items = cseResult.items
items.map(item => {
let resultItem = {
url: item.link,
snippet: item.title,
thumbnail: item.image.thumbnailLink,
context: item.image.contextLink
}
searchResults.push(resultItem)
})
resolve(searchResults);
})
})
}
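One detail worth adding: the request's 'error' event should call reject as well, otherwise a network failure leaves the promise pending forever and your .catch never fires. Here is a minimal sketch of the full wrapper, reusing the request code and environment variables from the question:
const https = require('https')

function cseSearch (request) {
  // Build the query string exactly as in the question
  let cseParams =
    `?q=${request.params.query}` +
    `&cx=${process.env.CSE_ID}` +
    `&key=${process.env.API_KEY}` +
    '&num=10&safe=high&searchType=image' +
    `&start=${request.query.offset || 1}`
  let options = {
    hostname: 'www.googleapis.com',
    path: '/customsearch/v1' + encodeURI(cseParams)
  }
  return new Promise((resolve, reject) => {
    let cseRequest = https.request(options, cseResponse => {
      let jsonString = ''
      cseResponse.on('data', data => { jsonString += data })
      cseResponse.on('end', () => {
        let items = JSON.parse(jsonString).items
        // Resolve with the mapped results so the caller's .then() receives them
        resolve(items.map(item => ({
          url: item.link,
          snippet: item.title,
          thumbnail: item.image.thumbnailLink,
          context: item.image.contextLink
        })))
      })
    })
    // Without this the promise would never settle on a network error
    cseRequest.on('error', reject)
    cseRequest.end()
  })
}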
Based on what I explained in the comments, to give you an idea how compact your code could be using the request-promise library, here's what you could use:
const rp = require('request-promise-native');
app.get('/api/imagesearch/:query', newQuery)
app.post('/', newQuery)
function newQuery (req, res) {
let query = req.body.query || req.params.query
console.log(`Search Query: ${query}`)
cseSearch(req).then(results => {
res.json(results);
}).catch(err => {
console.log("newQueryError ", err);
res.sendStatus(500);
});
}
function cseSearch (request) {
let cseParams = '' +
`?q=${request.params.query}` +
`&cx=${process.env.CSE_ID}` +
`&key=${process.env.API_KEY}` +
'&num=10' +
'&safe=high' +
'&searchType=image' +
`&start=${request.query.offset || 1}`
let options = {
    // request-promise expects a full uri; it does not take hostname/path like http.request
    uri: 'https://www.googleapis.com/customsearch/v1' + encodeURI(cseParams),
    json: true
};
return rp(options).then(data => {
return data.items.map(item => {
return {
url: item.link,
snippet: item.title,
thumbnail: item.image.thumbnailLink,
context: item.image.contextLink
};
});
    });
}
Related
I have a Kibana instance behind a Node.js 16.x AWS Lambda reverse proxy. It works fine for almost everything, except for the "Discover" section: when I add 2 or more filters over a 2+ month time range, it gives back a Bad Gateway error.
Detail of the error:
Wrapper#https://mydomain/_dashboards/909221/bundles/core/core.entry.js:6:4249
_createSuperInternal#https://mydomain/_dashboards/909221/bundles/core/core.entry.js:6:3388
...
...
Implementation:
function proxy(event, context, lambdaCallback) {
delete(event.headers["accept-encoding"])
var path = event.path;
if (event.multiValueQueryStringParameters) {
path += '?' + deserializeQueryString(event.multiValueQueryStringParameters)
}
// Calculate the options for the HTTPS request
var opts = {
host: my_es_endpoint,
path: path,
method: event.httpMethod,
service: 'es',
region: my_region,
headers: event.headers
}
if ((event.httpMethod == 'POST') || (event.httpMethod == 'PUT')) {
if (event.body) {
var buff = new Buffer(event.body, "base64")
var payload = buff.toString('utf-8')
opts.body = payload
}
}
// Use aws4 to sign the request so we can talk with ElasticSearch directly
aws4.sign(opts);
const req = https.request(opts, function (res) {
var bodyParts = [];
// We need to read all the incoming data
res.on('data', (chunk) => {
bodyParts.push(chunk)
});
res.on("end", () => {
// We re-create the read content
var body = Buffer.concat(bodyParts).toString()
// We send back uncompressed data
delete(res.headers['content-encoding'])
res.headers['X-Frame-Options'] = 'DENY'
// res.headers['content-security-policy'] = "default-src 'self'; frame-ancestors 'none'"
res.headers['X-XSS-Protection'] = '1; mode=block'
res.headers['X-Content-Type-Options'] = 'nosniff'
var response = {
statusCode: res.statusCode,
body: body,
headers: res.headers
}
lambdaCallback(null, response);
})
});
req.on('error', (e) => {
console.log(`Error caught when calling ElasticSearch: ${e}`)
})
// For POST/PUT request, we send the content of the paylod as well
if ((event.httpMethod == 'POST') || (event.httpMethod == 'PUT')) {
if (event.body) {
var buff = new Buffer(event.body, "base64")
var payload = buff.toString('utf-8')
req.write(payload)
}
}
req.end();
return req;
}
I tried:
Increasing the timeout on the request
Increasing the Lambda memory from 128 to 512
Increasing the Lambda timeout
One thing you can try is to increase the memory allocation for the Lambda function; it might resolve the issue.
Or, to remove the headache entirely, use Amazon API Gateway instead of a reverse proxy in a Lambda function to proxy requests to Kibana. You can also increase the timeout for Kibana requests. This will be more efficient and scalable for your project, with fewer issues.
I also tried to optimize your code; can you check whether it improves or corrects your issue?
const https = require('https');
const aws4 = require('aws4');
function deserializeQueryString(query) {
const params = new URLSearchParams();
for (const key of Object.keys(query)) {
for (const value of query[key]) {
params.append(key, value);
}
}
return params.toString();
}
// With an async handler, Lambda uses the returned promise, so no callback parameter is needed
exports.handler = async (event) => {
  const { headers, httpMethod, multiValueQueryStringParameters, body } = event;
  let path = event.path;
  // Remove accept-encoding header
  delete headers['accept-encoding'];
  // Append query string parameters to the path
  if (multiValueQueryStringParameters) {
    path += '?' + deserializeQueryString(multiValueQueryStringParameters);
  }
const opts = {
host: my_es_endpoint,
path,
method: httpMethod,
service: 'es',
region: my_region,
headers
};
// Add request body for POST and PUT methods
if (['POST', 'PUT'].includes(httpMethod) && body) {
opts.body = Buffer.from(body, 'base64').toString('utf-8');
}
// Sign the request using aws4
aws4.sign(opts);
return new Promise((resolve, reject) => {
const req = https.request(opts, res => {
let body = '';
res.on('data', chunk => {
body += chunk;
});
res.on('end', () => {
// Remove content-encoding header
delete res.headers['content-encoding'];
// Set security headers
res.headers['X-Frame-Options'] = 'DENY';
res.headers['X-XSS-Protection'] = '1; mode=block';
res.headers['X-Content-Type-Options'] = 'nosniff';
const response = {
statusCode: res.statusCode,
body,
headers: res.headers
};
resolve(response);
});
});
req.on('error', error => {
console.error(`Error caught when calling ElasticSearch: ${error}`);
reject(error);
});
// Write request body for POST and PUT methods
if (['POST', 'PUT'].includes(httpMethod) && body) {
req.write(opts.body);
}
req.end();
  });
};
I am not sure if the error is related to the proxy implementation; however, I found something that helped, and now I am not experiencing the problem anymore.
I reduced the value of discover:sampleSize in Kibana Advanced Settings as suggested here
I'm a non-professional using a Node.js server (backend) and JavaScript/HTML (frontend) to fetch data from two APIs: one API gives a response, and I use an ID from the first API to fetch data from the other API. The API returns XML, so I use xml2js and JSON.parse to get a JavaScript object.
Everything works perfectly until I get to the return nestedFunction(new_details) call in the second API call.
This is where the results are sent back to the client.
I do the same for the first API and it works fine (backend + frontend).
I tried async/await but the problem isn't solved.
I get the error: "UnhandledPromiseRejectionWarning: TypeError: nestedFunction is not a function"
What could be the problem?
app.get('/AATGetTermMatch', function(req,res) {
let termMatch = req.query.term;
let termLogop = req.query.logop;
let termNotes = req.query.notes;
AATGetTermMatch(termMatch, termLogop, termNotes, function (conceptResults) {
res.send(conceptResults);
});
});
function AATGetTermMatch(termMatch, termLogop, termNotes, nestedFunction) {
let URL = baseURL + "AATGetTermMatch?term=" + termMatch + "&logop=" + termLogop + "&notes=" + termNotes;
fetch(URL)
.then(function (response){
return response.text();
}).then(function (response) {
APICallResults = response;
parseJson();
let objectAPI = JSON.parse(APICallResults);
let full_Concepts = objectAPI.Vocabulary.Subject;
let i;
for (i = 0; i < full_Concepts.length; i++) {
global.ID = full_Concepts[i].Subject_ID;
searchTermDetails(global.ID);
} return nestedFunction(full_Concepts);
});
}
app.get('/subjectID', function(req, res) {
let selectedID = req.query.subjectID;
searchTermDetails(selectedID, function (termDetails) {
res.json(termDetails);
});
});
2nd API : http://vocabsservices.getty.edu/AATService.asmx/AATGetSubject?subjectID=300004838
function searchTermDetails(selectedID, nestedFunction) {
selectedID = global.ID;
let URL_Details = baseURL + "AATGetSubject?" + "subjectID=" + selectedID;
fetch(URL_Details)
.then(function (response) {
return response.text();
}).then(function (response) {
APICallResults_new = response;
parseJsonAgain();
let detailAPI = JSON.parse(APICallResults_new);
let new_details = detailAPI.Vocabulary.Subject;
let Subject_ID = new_details[0].Subject_ID;
let descriptive_Notes_English = new_details[0].Descriptive_Notes[0].Descriptive_Note[0].Note_Text;
return nestedFunction(new_details); // <-- this is where it fails
}).catch(function (error) {
console.log("error");
});
}
function parseJson() {
xml2js.parseString(APICallResults, {object: true, trim:true, sanitize: true, arrayNotation: true, mergeAttrs: true}, (err, result) => {
if (err) {
throw err;
}
const resultJson = JSON.stringify(result, null, 4);
//JSON.parse(resultJson);
APICallResults = resultJson;
});
}
function parseJsonAgain() {
xml2js.parseString(APICallResults_new, {object: true, trim:true, sanitize: true, arrayNotation: true, mergeAttrs: true}, (err, result) => {
if (err) {
throw err;
}
const resultJsonAgain = JSON.stringify(result, null, 4);
APICallResults_new = resultJsonAgain;
//console.log(APICallResults_new);
});
}
I've read many threads about this error but the proposed solutions don't seem to work.
In here:
for (i = 0; i < full_Concepts.length; i++) {
global.ID = full_Concepts[i].Subject_ID;
searchTermDetails(global.ID);
}
where you call searchTermDetails(), you are not passing the second argument, nestedFunction. Thus, when searchTermDetails() tries to use that argument, it causes the error you get.
You can either add the callback to this call or, if the callback should be optional, modify searchTermDetails to check whether the second argument is a function and, if not, skip calling it, as sketched below.
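For illustration, the optional-callback version of searchTermDetails could look like the following sketch (based on the question's own code; the globals and the parseJsonAgain helper are assumed to exist as posted):
function searchTermDetails(selectedID, nestedFunction) {
  let URL_Details = baseURL + "AATGetSubject?" + "subjectID=" + selectedID;
  return fetch(URL_Details)
    .then(function (response) {
      return response.text();
    })
    .then(function (response) {
      APICallResults_new = response;
      parseJsonAgain();
      let detailAPI = JSON.parse(APICallResults_new);
      let new_details = detailAPI.Vocabulary.Subject;
      // Only call the callback when one was actually supplied
      if (typeof nestedFunction === 'function') {
        return nestedFunction(new_details);
      }
      return new_details;
    })
    .catch(function (error) {
      console.log(error);
    });
}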
I am trying to call one async function from inside a loop run by another async function. These functions call APIs, and I am using request-promise with Node.js.
functions.js file
const rp = require("request-promise");
// function (1)
email_views: async emailId => {
let data = {};
await rp({
url: 'myapiurl',
qs: { accessToken: 'xyz', emailID: emailId },
method: 'GET'
})
.then( body => { data = JSON.parse(body) })
.catch( error => { console.log(error} );
return data;
};
The above JSON looks like this:
...
data:{
records: [
{
...
contactID: 123456,
...
},
{
...
contactID: 456789,
...
}
]
}
...
I am running a loop to get individual record, where I am getting a contactID associated with each of them.
// function#2 (also in functions.js file)
contact_detail: async contactId => {
let data = {};
await rp({
url: 'myapiurl2',
qs: { accessToken: 'xyz', contactID: contactId },
method: 'GET'
})
.then( body => { data = JSON.parse(body) })
.catch( error => { console.log(error} );
return data;
};
The above function takes one contactId as parameter and gets that contact's detail calling another API endpoint.
Both functions work fine when they are called separately. But I am trying to do it inside a loop like this:
...
const result = await email_views(99999); // function#1
const records = result.data.records;
...
let names = "";
for( let i=0; i<records.length; i++) {
...
const cId = records[i].contactID;
const contact = await contact_detail(cId); // function#2
names += contact.data.firstName + " " + contact.data.lastName + " ";
...
}
console.log(names);
...
The problem is that I only get the first contact back from the above code block; i.e. even though I have 20 records from function#1, when I call contact_detail (function#2) in the loop for each contactID (cId), I get the contact detail only once, for the first cId. For the rest I get nothing!
What is the correct way to achieve this using Node.js?
UPDATE:
const { App } = require("jovo-framework");
const { Alexa } = require("jovo-platform-alexa");
const { GoogleAssistant } = require("jovo-platform-googleassistant");
const { JovoDebugger } = require("jovo-plugin-debugger");
const { FileDb } = require("jovo-db-filedb");
const custom = require("./functions");
const menuop = require("./menu");
const stateus = require("./stateus");
const alexaSpeeches = require("./default_speech");
const app = new App();
app.use(new Alexa(), new GoogleAssistant(), new JovoDebugger(), new FileDb());
let sp = "";
async EmailViewsByContactIntent() {
try {
const viewEmailId =
this.$session.$data.viewEmailIdSessionKey != null
? this.$session.$data.viewEmailIdSessionKey
: this.$inputs.view_email_Id_Number.value;
let pageIndex =
this.$session.$data.viewEmailPageIndex != null
? this.$session.$data.viewEmailPageIndex
: 1;
const result = await custom.email_views_by_emailId(
viewEmailId,
pageIndex
);
const records = result.data.records;
if (records.length > 0) {
const totalRecords = result.data.paging.totalRecords;
this.$session.$data.viewEmailTotalPages = totalRecords;
sp = `i have found a total of ${totalRecords} following view records. `;
if (totalRecords > 5) {
sp += `i will tell you 5 records at a time. for next 5 records, please say, next. `;
this.$session.$data.viewEmailIdSessionKey = this.$inputs.view_email_Id_Number.value;
this.$session.$data.viewEmailPageIndex++;
}
for (let i = 0; i < records.length; i++) {
const r = records[i];
/* Here I want to pass r.contactID as contactId in the function contact_detail like this: */
const contact = await custom.contact_detail(r.contactID);
const contact_name = contact.data.firstName + " " + contact.data.lastName;
/* The above two lines of code fetch contact_name for the first r.contactID and for the rest I get an empty string only. */
const formatted_date = r.date.split(" ")[0];
sp += `contact ID ${spellOut_speech_builder(
r.contactID
)} had viewed on ${formatted_date} from IP address ${
r.ipAddress
}. name of contact is, ${contact_name}. `;
}
if (totalRecords > 5) {
sp += ` please say, next, for next 5 records. `;
}
} else {
sp = ``;
}
this.ask(sp);
} catch (e) {
this.tell(e);
}
}
I am building an Alexa skill using the Jovo framework and Node.js.
UPDATE #2
As a test, I only returned the contactId which I am passing to the contact_detail function and I am getting the correct value back to the above code under my first UPDATE.
contact_detail: async contactId => {
return contactId;
}
It seems that even after getting the value right, the function is somehow failing to execute. However, the same contact_detail function works perfectly fine when I call it from another place; it just doesn't work inside a loop.
What could be the reason?
I must be missing something but don't know what!
You are mixing async/await and promises together, which is causing you confusion. You would typically use one or the other (async/await effectively provides syntactic sugar so you can avoid dealing with verbose promise code) in a given location.
Because you mixed the two, you are in a weird area where the behavior is harder to nail down.
If you want to use async await your functions should look like
contact_detail: async contactId => {
try {
const body = await rp({
url: 'myapiurl2',
qs: { ... }
});
return JSON.parse(body);
} catch(e) {
console.log(e);
//This will return undefined in exception cases. You may want to catch at a higher level.
}
};
or with promises
contact_detail: contactId => {
return rp({
url: 'myapiurl2',
qs: { ... }
})
.then( body => JSON.parse(body))
.catch( error => {
console.log(error);
//This will return undefined in exception cases. You probably dont want to catch here.
});
};
Keep in mind that your current code executes each call in series. If you want to run them in parallel, you will need to call the function a bit differently and use something like Promise.all to resolve the results; see the sketch below.
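For example, assuming contact_detail returns a promise as above, the lookups can be fired in parallel and gathered with Promise.all, which also keeps the results in the original record order (a sketch, placed inside an async function just like your original loop):
// Start every lookup at once, then wait for all of them to finish
const contacts = await Promise.all(
  records.map(record => contact_detail(record.contactID))
);

// Promise.all preserves input order, so names line up with records
const names = contacts
  .map(contact => `${contact.data.firstName} ${contact.data.lastName}`)
  .join(' ');

console.log(names);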
Here you go:
...
const result = await email_views(99999); // function#1
const records = result.data.records;
...
let names = "";
await Promise.all(records.map(async record => {
let cId = record.contactID;
let contact = await contact_detail(cId);
names += contact.data.firstName + " " + contact.data.lastName + " ";
}));
console.log(names);
...
I'm posting this as an answer only because I need to show you some multi-line code as part of troubleshooting this. Not sure this solves your issue yet, but it is a problem.
Your contact_detail() function is not properly returning errors. Instead, it eats the error and resolves with an empty object. That could be what is causing your blank names. It should just return the promise directly, and if you want to log the error, it needs to rethrow. Also, there's no reason for it to be declared async or to use await; you can just return the promise directly. You can also let request-promise parse the JSON response for you.
Also, I notice there appears to be a syntax error in your .catch(), which could also be part of the problem.
contact_detail: contactId => {
return rp({
url: 'myapiurl2',
qs: { accessToken: 'xyz', contactID: contactId },
json: true,
method: 'GET'
}).catch( error => {
// log error and rethrow so any error propagates
console.log(error);
throw error;
});
};
Then, you would call this like you originally were (note you still use await when calling it because it returns a promise):
...
const result = await email_views(99999); // function#1
const records = result.data.records;
...
let names = "";
for( let i=0; i<records.length; i++) {
...
const cId = records[i].contactID;
const contact = await contact_detail(cId);
names += contact.data.firstName + " " + contact.data.lastName + " ";
...
}
console.log(names);
...
I am having difficulty getting my LatLon lookup to work - I have read
Get Google Maps Geocoding JSON from Express - but that just says to use HTTP... and I have read the docs on http/https - but I'm still getting an error.
Here is my code - so calling myUrl/LatLon should give me the Google API response - or at least that is what I want...
const https = require('https');
router.get( '/LatLon', ( res ) => {console.log('Here getting https');
const googleKey = '---';
const address = '1600 Amphitheatre Parkway, Mountain View, CA';
const options = new URL('https://maps.googleapis.com/maps/api/geocode/json?address=' + address + '&key=' + googleKey);
const req = https.request(options, (res) => {
res.on('data', (chunk) => {
console.log(`BODY: ${chunk}`);
});
res.on('end', () => {
console.log('No more data in response.');
});
});
req.on('error', (e) => {
console.error(`problem with request: ${e.message}`);
});
req.write();
req.end();
});
I get this error -
TypeError [ERR_INVALID_ARG_TYPE]: The first argument must be one of type string or Buffer. Received type undefined
at write_ (_http_outgoing.js:595:11)
at ClientRequest.write (_http_outgoing.js:567:10)
Any help would be greatly appreciated - I have tried about 4 variations on using "get" or "https"...
I found node-geocoder - and it worked great...
Basically I did this; it is 'generalized', non-functional code, but you'll get the idea.
A bunch of checks and comparisons went into it so I am not hitting APIs when I do not need to.
var NodeGeocoder = require('node-geocoder');
var options = {
provider: process.env.GEOCODING_PROVIDER,
httpAdapter: 'https',
apiKey: process.env.GEOCODING_KEY,
formatter: null
};
var geocoder = NodeGeocoder(options);
collection.getExistingId( req.params.id, ( err, record ) => {
const existingAddress = addresstoString(record.address);
const newAddress = addresstoString(newRecord.address);
if ( !compareAddresses(existingAddress,newAddress) ) {
geocoder.geocode(newAddress, function(err, geocode) {
let coords = []; // mongoDB wants [Long,Lat]
coords[0] = geocode[0].longitude;
coords[1] = geocode[0].latitude;
// set existingAddress.COORDINATES = coords
// save the record
});
}
});
I've noticed that I'm writing http://localhost every time I want to run a node test with superagent.
import superagent from 'superagent';
const request = superagent.agent();
request
.get('http://localhost/whatever')
.end((err, res) => { ... });
Is there any way of avoiding the localhost part?
The furthest I've gotten is to avoid hardcoding the host in the request:
const baseUrl = 'http://localhost:3030';
request
.get(`${baseUrl}/whatever`)
But I still have to carry the baseUrl with the agent every time.
While superagent-prefix has not been updated as recently as superagent-absolute, it is officially recommended and has the highest adoption as of 2020.
It is such a simple package that I would not be concerned about the lack of updates.
Example usage:
import superagent from "superagent"
import prefix from "superagent-prefix"
const baseURL = "https://foo.bar/api/"
const client = superagent.use(prefix(baseURL))
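With the prefixed client above, requests can then use relative paths; a minimal usage sketch, assuming the client works as described (the endpoint name is just a placeholder):
// superagent-prefix prepends the configured prefix to relative request URLs
client.get("/whatever").then(res => {
  console.log(res.status, res.body)
})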
TL;DR: superagent-absolute does exactly that.
Detailed:
You can create an abstraction layer on top of superagent.
function superagentAbsolute(agent) {
return baseUrl => ({
get: url => url.startsWith('/') ? agent.get(baseUrl + url) : agent.get(url),
});
}
⬑ That would override the agent.get when called with a starting /
global.request = superagentAbsolute(agent)('http://localhost:3030');
Now you would need to do the same for: DELETE, HEAD, PATCH, POST and PUT.
https://github.com/zurfyx/superagent-absolute/blob/master/index.js
const OVERRIDE = 'delete,get,head,patch,post,put'.split(',');
const superagentAbsolute = agent => baseUrl => (
new Proxy(agent, {
get(target, propertyName) {
return (...params) => {
if (OVERRIDE.indexOf(propertyName) !== -1
&& params.length > 0
&& typeof params[0] === 'string'
&& params[0].startsWith('/')) {
const absoluteUrl = baseUrl + params[0];
return target[propertyName](absoluteUrl, ...params.slice(1));
}
return target[propertyName](...params);
};
},
})
);
Or you can simply use superagent-absolute.
const superagent = require('superagent');
const superagentAbsolute = require('superagent-absolute');
const agent = superagent.agent();
const request = superagentAbsolute(agent)('http://localhost:3030');
it('should display "It works!"', (done) => {
request
.get('/') // Requests "http://localhost:3030/".
.end((err, res) => {
expect(res.status).to.equal(200);
expect(res.body).to.eql({ msg: 'It works!' });
done();
});
});